CINXE.COM

Image Processing Research Papers - Academia.edu

<!DOCTYPE html> <html lang="en" xmlns:fb="http://www.facebook.com/2008/fbml" class="wf-loading"> <head prefix="og: https://ogp.me/ns# fb: https://ogp.me/ns/fb# academia: https://ogp.me/ns/fb/academia#"> <meta charset="utf-8"> <meta name=viewport content="width=device-width, initial-scale=1"> <meta rel="search" type="application/opensearchdescription+xml" href="/open_search.xml" title="Academia.edu"> <title>Image Processing Research Papers - Academia.edu</title> <!-- _ _ _ | | (_) | | __ _ ___ __ _ __| | ___ _ __ ___ _ __ _ ___ __| |_ _ / _` |/ __/ _` |/ _` |/ _ \ '_ ` _ \| |/ _` | / _ \/ _` | | | | | (_| | (_| (_| | (_| | __/ | | | | | | (_| || __/ (_| | |_| | \__,_|\___\__,_|\__,_|\___|_| |_| |_|_|\__,_(_)___|\__,_|\__,_| We're hiring! See https://www.academia.edu/hiring --> <link href="//a.academia-assets.com/images/favicons/favicon-production.ico" rel="shortcut icon" type="image/vnd.microsoft.icon"> <link rel="apple-touch-icon" sizes="57x57" href="//a.academia-assets.com/images/favicons/apple-touch-icon-57x57.png"> <link rel="apple-touch-icon" sizes="60x60" href="//a.academia-assets.com/images/favicons/apple-touch-icon-60x60.png"> <link rel="apple-touch-icon" sizes="72x72" href="//a.academia-assets.com/images/favicons/apple-touch-icon-72x72.png"> <link rel="apple-touch-icon" sizes="76x76" href="//a.academia-assets.com/images/favicons/apple-touch-icon-76x76.png"> <link rel="apple-touch-icon" sizes="114x114" href="//a.academia-assets.com/images/favicons/apple-touch-icon-114x114.png"> <link rel="apple-touch-icon" sizes="120x120" href="//a.academia-assets.com/images/favicons/apple-touch-icon-120x120.png"> <link rel="apple-touch-icon" sizes="144x144" href="//a.academia-assets.com/images/favicons/apple-touch-icon-144x144.png"> <link rel="apple-touch-icon" sizes="152x152" href="//a.academia-assets.com/images/favicons/apple-touch-icon-152x152.png"> <link rel="apple-touch-icon" sizes="180x180" href="//a.academia-assets.com/images/favicons/apple-touch-icon-180x180.png"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-32x32.png" sizes="32x32"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-194x194.png" sizes="194x194"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-96x96.png" sizes="96x96"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/android-chrome-192x192.png" sizes="192x192"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-16x16.png" sizes="16x16"> <link rel="manifest" href="//a.academia-assets.com/images/favicons/manifest.json"> <meta name="msapplication-TileColor" content="#2b5797"> <meta name="msapplication-TileImage" content="//a.academia-assets.com/images/favicons/mstile-144x144.png"> <meta name="theme-color" content="#ffffff"> <script> window.performance && window.performance.measure && window.performance.measure("Time To First Byte", "requestStart", "responseStart"); </script> <script> (function() { if (!window.URLSearchParams || !window.history || !window.history.replaceState) { return; } var searchParams = new URLSearchParams(window.location.search); var paramsToDelete = [ 'fs', 'sm', 'swp', 'iid', 'nbs', 'rcc', // related content category 'rcpos', // related content carousel position 'rcpg', // related carousel page 'rchid', // related content hit id 'f_ri', // research interest id, for SEO tracking 'f_fri', // featured research interest, for SEO tracking (param key without value) 'f_rid', // from research interest directory for SEO tracking 'f_loswp', // from research interest pills on LOSWP sidebar for SEO tracking 'rhid', // referrring hit id ]; if (paramsToDelete.every((key) => searchParams.get(key) === null)) { return; } paramsToDelete.forEach((key) => { searchParams.delete(key); }); var cleanUrl = new URL(window.location.href); cleanUrl.search = searchParams.toString(); history.replaceState({}, document.title, cleanUrl); })(); </script> <script async src="https://www.googletagmanager.com/gtag/js?id=G-5VKX33P2DS"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-5VKX33P2DS', { cookie_domain: 'academia.edu', send_page_view: false, }); gtag('event', 'page_view', { 'controller': "by_tag", 'action': "show_one", 'controller_action': 'by_tag#show_one', 'logged_in': 'false', 'edge': 'unknown', // Send nil if there is no A/B test bucket, in case some records get logged // with missing data - that way we can distinguish between the two cases. // ab_test_bucket should be of the form <ab_test_name>:<bucket> 'ab_test_bucket': null, }) </script> <script type="text/javascript"> window.sendUserTiming = function(timingName) { if (!(window.performance && window.performance.measure)) return; var entries = window.performance.getEntriesByName(timingName, "measure"); if (entries.length !== 1) return; var timingValue = Math.round(entries[0].duration); gtag('event', 'timing_complete', { name: timingName, value: timingValue, event_category: 'User-centric', }); }; window.sendUserTiming("Time To First Byte"); </script> <meta name="csrf-param" content="authenticity_token" /> <meta name="csrf-token" content="B2e3Brweqb2tT8HUPbkDH2Pjr7a6j_wdTojlQPML7I4zKAnlQ-2aUaslXoKcaIScx64998qeCkR1FPjma8mxtA" /> <link href="/Documents/in/Image_Processing?after=50%2C18650509" rel="next" /><link crossorigin="" href="https://fonts.gstatic.com/" rel="preconnect" /><link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,opsz,wght@0,9..40,100..1000;1,9..40,100..1000&amp;family=Gupter:wght@400;500;700&amp;family=IBM+Plex+Mono:wght@300;400&amp;family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20,400,0,0&amp;display=swap" rel="stylesheet" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/common-2b6f90dbd75f5941bc38f4ad716615f3ac449e7398313bb3bc225fba451cd9fa.css" /> <meta name="description" content="View Image Processing Research Papers on Academia.edu for free." /> <meta name="google-site-verification" content="bKJMBZA7E43xhDOopFZkssMMkBRjvYERV-NaN4R6mrs" /> <script> var $controller_name = 'by_tag'; var $action_name = "show_one"; var $rails_env = 'production'; var $app_rev = 'dc2ad41da5d7ea682babd20f90650302fb0a3a36'; var $domain = 'academia.edu'; var $app_host = "academia.edu"; var $asset_host = "academia-assets.com"; var $start_time = new Date().getTime(); var $recaptcha_key = "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB"; var $recaptcha_invisible_key = "6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj"; var $disableClientRecordHit = false; </script> <script> window.Aedu = { hit_data: null }; window.Aedu.SiteStats = {"premium_universities_count":14020,"monthly_visitors":"97 million","monthly_visitor_count":97692212,"monthly_visitor_count_in_millions":97,"user_count":282904512,"paper_count":55203019,"paper_count_in_millions":55,"page_count":432000000,"page_count_in_millions":432,"pdf_count":16500000,"pdf_count_in_millions":16}; window.Aedu.serverRenderTime = new Date(1739712036000); window.Aedu.timeDifference = new Date().getTime() - 1739712036000; window.Aedu.isUsingCssV1 = false; window.Aedu.enableLocalization = true; window.Aedu.activateFullstory = false; window.Aedu.serviceAvailability = { status: {"attention_db":"on","bibliography_db":"on","contacts_db":"on","email_db":"on","indexability_db":"on","mentions_db":"on","news_db":"on","notifications_db":"on","offsite_mentions_db":"on","redshift":"on","redshift_exports_db":"on","related_works_db":"on","ring_db":"on","user_tests_db":"on"}, serviceEnabled: function(service) { return this.status[service] === "on"; }, readEnabled: function(service) { return this.serviceEnabled(service) || this.status[service] === "read_only"; }, }; window.Aedu.viewApmTrace = function() { // Check if x-apm-trace-id meta tag is set, and open the trace in APM // in a new window if it is. var apmTraceId = document.head.querySelector('meta[name="x-apm-trace-id"]'); if (apmTraceId) { var traceId = apmTraceId.content; // Use trace ID to construct URL, an example URL looks like: // https://app.datadoghq.com/apm/traces?query=trace_id%31298410148923562634 var apmUrl = 'https://app.datadoghq.com/apm/traces?query=trace_id%3A' + traceId; window.open(apmUrl, '_blank'); } }; </script> <!--[if lt IE 9]> <script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.2/html5shiv.min.js"></script> <![endif]--> <link href="https://fonts.googleapis.com/css?family=Roboto:100,100i,300,300i,400,400i,500,500i,700,700i,900,900i" rel="stylesheet"> <link rel="preload" href="//maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css" as="style" onload="this.rel='stylesheet'"> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/libraries-a9675dcb01ec4ef6aa807ba772c7a5a00c1820d3ff661c1038a20f80d06bb4e4.css" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/academia-40698df34f913bd208bb70f09d2feb7c6286046250be17a4db35bba2c08b0e2f.css" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system_legacy-056a9113b9a0f5343d013b29ee1929d5a18be35fdcdceb616600b4db8bd20054.css" /> <script src="//a.academia-assets.com/assets/webpack_bundles/runtime-bundle-005434038af4252ca37c527588411a3d6a0eabb5f727fac83f8bbe7fd88d93bb.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/webpack_libraries_and_infrequently_changed.wjs-bundle-a22f75d8519394c21253dae46c8c5d60ad36ea68c7d494347ec64229d8c1cf85.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/core_webpack.wjs-bundle-5708a105dd66b4c7d0ef30b7c094b1048423f0042bd2a7b123f2d99ee3cf46d9.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/sentry.wjs-bundle-5fe03fddca915c8ba0f7edbe64c194308e8ce5abaed7bffe1255ff37549c4808.js"></script> <script> jade = window.jade || {}; jade.helpers = window.$h; jade._ = window._; </script> <!-- Google Tag Manager --> <script id="tag-manager-head-root">(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0], j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src= 'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f); })(window,document,'script','dataLayer_old','GTM-5G9JF7Z');</script> <!-- End Google Tag Manager --> <script> window.gptadslots = []; window.googletag = window.googletag || {}; window.googletag.cmd = window.googletag.cmd || []; </script> <script type="text/javascript"> // TODO(jacob): This should be defined, may be rare load order problem. // Checking if null is just a quick fix, will default to en if unset. // Better fix is to run this immedietely after I18n is set. if (window.I18n != null) { I18n.defaultLocale = "en"; I18n.locale = "en"; I18n.fallbacks = true; } </script> <link rel="canonical" href="https://www.academia.edu/Documents/in/Image_Processing" /> </head> <!--[if gte IE 9 ]> <body class='ie ie9 c-by_tag a-show_one logged_out u-bgColorWhite'> <![endif]--> <!--[if !(IE) ]><!--> <body class='c-by_tag a-show_one logged_out u-bgColorWhite'> <!--<![endif]--> <div id="fb-root"></div><script>window.fbAsyncInit = function() { FB.init({ appId: "2369844204", version: "v8.0", status: true, cookie: true, xfbml: true }); // Additional initialization code. if (window.InitFacebook) { // facebook.ts already loaded, set it up. window.InitFacebook(); } else { // Set a flag for facebook.ts to find when it loads. window.academiaAuthReadyFacebook = true; } };</script><script>window.fbAsyncLoad = function() { // Protection against double calling of this function if (window.FB) { return; } (function(d, s, id){ var js, fjs = d.getElementsByTagName(s)[0]; if (d.getElementById(id)) {return;} js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_US/sdk.js"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk')); } if (!window.defer_facebook) { // Autoload if not deferred window.fbAsyncLoad(); } else { // Defer loading by 5 seconds setTimeout(function() { window.fbAsyncLoad(); }, 5000); }</script> <div id="google-root"></div><script>window.loadGoogle = function() { if (window.InitGoogle) { // google.ts already loaded, set it up. window.InitGoogle("331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"); } else { // Set a flag for google.ts to use when it loads. window.GoogleClientID = "331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"; } };</script><script>window.googleAsyncLoad = function() { // Protection against double calling of this function (function(d) { var js; var id = 'google-jssdk'; var ref = d.getElementsByTagName('script')[0]; if (d.getElementById(id)) { return; } js = d.createElement('script'); js.id = id; js.async = true; js.onload = loadGoogle; js.src = "https://accounts.google.com/gsi/client" ref.parentNode.insertBefore(js, ref); }(document)); } if (!window.defer_google) { // Autoload if not deferred window.googleAsyncLoad(); } else { // Defer loading by 5 seconds setTimeout(function() { window.googleAsyncLoad(); }, 5000); }</script> <div id="tag-manager-body-root"> <!-- Google Tag Manager (noscript) --> <noscript><iframe src="https://www.googletagmanager.com/ns.html?id=GTM-5G9JF7Z" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript> <!-- End Google Tag Manager (noscript) --> <!-- Event listeners for analytics --> <script> window.addEventListener('load', function() { if (document.querySelector('input[name="commit"]')) { document.querySelector('input[name="commit"]').addEventListener('click', function() { gtag('event', 'click', { event_category: 'button', event_label: 'Log In' }) }) } }); </script> </div> <script>var _comscore = _comscore || []; _comscore.push({ c1: "2", c2: "26766707" }); (function() { var s = document.createElement("script"), el = document.getElementsByTagName("script")[0]; s.async = true; s.src = (document.location.protocol == "https:" ? "https://sb" : "http://b") + ".scorecardresearch.com/beacon.js"; el.parentNode.insertBefore(s, el); })();</script><img src="https://sb.scorecardresearch.com/p?c1=2&amp;c2=26766707&amp;cv=2.0&amp;cj=1" style="position: absolute; visibility: hidden" /> <div id='react-modal'></div> <div class='DesignSystem'> <a class='u-showOnFocus' href='#site'> Skip to main content </a> </div> <div id="upgrade_ie_banner" style="display: none;"><p>Academia.edu no longer supports Internet Explorer.</p><p>To browse Academia.edu and the wider internet faster and more securely, please take a few seconds to&nbsp;<a href="https://www.academia.edu/upgrade-browser">upgrade your browser</a>.</p></div><script>// Show this banner for all versions of IE if (!!window.MSInputMethodContext || /(MSIE)/.test(navigator.userAgent)) { document.getElementById('upgrade_ie_banner').style.display = 'block'; }</script> <div class="DesignSystem bootstrap ShrinkableNav no-sm no-md"><div class="navbar navbar-default main-header"><div class="container-wrapper" id="main-header-container"><div class="container"><div class="navbar-header"><div class="nav-left-wrapper u-mt0x"><div class="nav-logo"><a data-main-header-link-target="logo_home" href="https://www.academia.edu/"><img class="visible-xs-inline-block" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015-A.svg" width="24" height="24" /><img width="145.2" height="18" class="hidden-xs" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015.svg" /></a></div><div class="nav-search"><div class="SiteSearch-wrapper select2-no-default-pills"><form class="js-SiteSearch-form DesignSystem" action="https://www.academia.edu/search" accept-charset="UTF-8" method="get"><i class="SiteSearch-icon fa fa-search u-fw700 u-positionAbsolute u-tcGrayDark"></i><input class="js-SiteSearch-form-input SiteSearch-form-input form-control" data-main-header-click-target="search_input" name="q" placeholder="Search" type="text" value="" /></form></div></div></div><div class="nav-right-wrapper pull-right"><ul class="NavLinks js-main-nav list-unstyled"><li class="NavLinks-link"><a class="js-header-login-url Button Button--inverseGray Button--sm u-mb4x" id="nav_log_in" rel="nofollow" href="https://www.academia.edu/login">Log In</a></li><li class="NavLinks-link u-p0x"><a class="Button Button--inverseGray Button--sm u-mb4x" rel="nofollow" href="https://www.academia.edu/signup">Sign Up</a></li></ul><button class="hidden-lg hidden-md hidden-sm u-ml4x navbar-toggle collapsed" data-target=".js-mobile-header-links" data-toggle="collapse" type="button"><span class="icon-bar"></span><span class="icon-bar"></span><span class="icon-bar"></span></button></div></div><div class="collapse navbar-collapse js-mobile-header-links"><ul class="nav navbar-nav"><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/login">Log In</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/signup">Sign Up</a></li><li class="u-borderColorGrayLight u-borderBottom1 js-mobile-nav-expand-trigger"><a href="#">more&nbsp<span class="caret"></span></a></li><li><ul class="js-mobile-nav-expand-section nav navbar-nav u-m0x collapse"><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/about">About</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/press">Press</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/documents">Papers</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/hiring"><i class="fa fa-briefcase"></i>&nbsp;We're Hiring!</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://support.academia.edu/hc/en-us"><i class="fa fa-question-circle"></i>&nbsp;Help Center</a></li><li class="js-mobile-nav-collapse-trigger u-borderColorGrayLight u-borderBottom1 dropup" style="display:none"><a href="#">less&nbsp<span class="caret"></span></a></li></ul></li></ul></div></div></div><script>(function(){ var $moreLink = $(".js-mobile-nav-expand-trigger"); var $lessLink = $(".js-mobile-nav-collapse-trigger"); var $section = $('.js-mobile-nav-expand-section'); $moreLink.click(function(ev){ ev.preventDefault(); $moreLink.hide(); $lessLink.show(); $section.collapse('show'); }); $lessLink.click(function(ev){ ev.preventDefault(); $moreLink.show(); $lessLink.hide(); $section.collapse('hide'); }); })() if ($a.is_logged_in() || false) { new Aedu.NavigationController({ el: '.js-main-nav', showHighlightedNotification: false }); } else { $(".js-header-login-url").attr("href", $a.loginUrlWithRedirect()); } Aedu.autocompleteSearch = new AutocompleteSearch({el: '.js-SiteSearch-form'});</script></div></div> <div id='site' class='fixed'> <div id="content" class="clearfix"> <script>document.addEventListener('DOMContentLoaded', function(){ var $dismissible = $(".dismissible_banner"); $dismissible.click(function(ev) { $dismissible.hide(); }); });</script> <div class="DesignSystem" style="margin-top:-40px"><div class="PageHeader"><div class="container"><div class="row"><style type="text/css">.sor-abstract { display: -webkit-box; overflow: hidden; text-overflow: ellipsis; -webkit-line-clamp: 3; -webkit-box-orient: vertical; }</style><div class="col-xs-12 clearfix"><div class="u-floatLeft"><h1 class="PageHeader-title u-m0x u-fs30">Image Processing</h1><div class="u-tcGrayDark">440,365&nbsp;Followers</div><div class="u-tcGrayDark u-mt2x">Recent papers in&nbsp;<b>Image Processing</b></div></div></div></div></div></div><div class="TabbedNavigation"><div class="container"><div class="row"><div class="col-xs-12 clearfix"><ul class="nav u-m0x u-p0x list-inline u-displayFlex"><li class="active"><a href="https://www.academia.edu/Documents/in/Image_Processing">Top Papers</a></li><li><a href="https://www.academia.edu/Documents/in/Image_Processing/MostCited">Most Cited Papers</a></li><li><a href="https://www.academia.edu/Documents/in/Image_Processing/MostDownloaded">Most Downloaded Papers</a></li><li><a href="https://www.academia.edu/Documents/in/Image_Processing/MostRecent">Newest Papers</a></li><li><a class="" href="https://www.academia.edu/People/Image_Processing">People</a></li></ul></div><style type="text/css">ul.nav{flex-direction:row}@media(max-width: 567px){ul.nav{flex-direction:column}.TabbedNavigation li{max-width:100%}.TabbedNavigation li.active{background-color:var(--background-grey, #dddde2)}.TabbedNavigation li.active:before,.TabbedNavigation li.active:after{display:none}}</style></div></div></div><div class="container"><div class="row"><div class="col-xs-12"><div class="u-displayFlex"><div class="u-flexGrow1"><div class="works"><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_26310127" data-work_id="26310127" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/26310127/Image_reconstruction_methods_in_positron_tomography">Image reconstruction methods in positron tomography</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In the two decades since the introduction of the X-ray scanner into radiology, medical imaging techniques have become widely established as essential tools in the diagnosis of disease. As a consequence of recent technological and... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_26310127" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In the two decades since the introduction of the X-ray scanner into radiology, medical imaging techniques have become widely established as essential tools in the diagnosis of disease. As a consequence of recent technological and mathematical advances, the non invasive, three-dimensional imaging of internal organs such as the brain and the heart is now possible, not only for anatomical investigations using X-rays but also for studies which explore the functional status of the body using positron-emitting radioisotopes. This report reviews the historical and physical basis of medical imaging techniques using positron-emitting radioisotopes. Mathematical methods which enable three-dimensional distributions of radioisotopes to be reconstructed from projection data (sinograms) acquired by detectors suitably positioned around the patient are discussed. The extension of conventional twodimensional tomographic reconstruction algorithms to fully three-dimensional reconstruction is described in detail. © Parallel projection Eqn. (3.1), &lt;\&gt; = 0 p(s, 0 = 0) © Filtering Eqn. (3.2) Two disks equal concentrations À © Backprojection Eqn. (3.5)</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/26310127" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="90fd04a2b3fcd1c9061b6003c470cd4d" rel="nofollow" data-download="{&quot;attachment_id&quot;:46623224,&quot;asset_id&quot;:26310127,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/46623224/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="50232202" href="https://nus.academia.edu/DavidTownsend">David Townsend</a><script data-card-contents-for-user="50232202" type="text/json">{"id":50232202,"first_name":"David","last_name":"Townsend","domain_name":"nus","page_name":"DavidTownsend","display_name":"David Townsend","profile_url":"https://nus.academia.edu/DavidTownsend?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_26310127 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="26310127"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 26310127, container: ".js-paper-rank-work_26310127", }); });</script></li><li class="js-percentile-work_26310127 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 26310127; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_26310127"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_26310127 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="26310127"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 26310127; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=26310127]").text(description); $(".js-view-count-work_26310127").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_26310127").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="26310127"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">12</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="428" rel="nofollow" href="https://www.academia.edu/Documents/in/Algorithms">Algorithms</a>,&nbsp;<script data-card-contents-for-ri="428" type="text/json">{"id":428,"name":"Algorithms","url":"https://www.academia.edu/Documents/in/Algorithms?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5057" rel="nofollow" href="https://www.academia.edu/Documents/in/Iterative_Methods">Iterative Methods</a>,&nbsp;<script data-card-contents-for-ri="5057" type="text/json">{"id":5057,"name":"Iterative Methods","url":"https://www.academia.edu/Documents/in/Iterative_Methods?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="35287" rel="nofollow" href="https://www.academia.edu/Documents/in/DISTRIBUTION">DISTRIBUTION</a><script data-card-contents-for-ri="35287" type="text/json">{"id":35287,"name":"DISTRIBUTION","url":"https://www.academia.edu/Documents/in/DISTRIBUTION?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=26310127]'), work: {"id":26310127,"title":"Image reconstruction methods in positron tomography","created_at":"2016-06-19T12:27:31.312-07:00","url":"https://www.academia.edu/26310127/Image_reconstruction_methods_in_positron_tomography?f_ri=1185","dom_id":"work_26310127","summary":"In the two decades since the introduction of the X-ray scanner into radiology, medical imaging techniques have become widely established as essential tools in the diagnosis of disease. As a consequence of recent technological and mathematical advances, the non invasive, three-dimensional imaging of internal organs such as the brain and the heart is now possible, not only for anatomical investigations using X-rays but also for studies which explore the functional status of the body using positron-emitting radioisotopes. This report reviews the historical and physical basis of medical imaging techniques using positron-emitting radioisotopes. Mathematical methods which enable three-dimensional distributions of radioisotopes to be reconstructed from projection data (sinograms) acquired by detectors suitably positioned around the patient are discussed. The extension of conventional twodimensional tomographic reconstruction algorithms to fully three-dimensional reconstruction is described in detail. © Parallel projection Eqn. (3.1), \u003c\\\u003e = 0 p(s, 0 = 0) © Filtering Eqn. (3.2) Two disks equal concentrations À © Backprojection Eqn. (3.5)","downloadable_attachments":[{"id":46623224,"asset_id":26310127,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":50232202,"first_name":"David","last_name":"Townsend","domain_name":"nus","page_name":"DavidTownsend","display_name":"David Townsend","profile_url":"https://nus.academia.edu/DavidTownsend?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":428,"name":"Algorithms","url":"https://www.academia.edu/Documents/in/Algorithms?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":5057,"name":"Iterative Methods","url":"https://www.academia.edu/Documents/in/Iterative_Methods?f_ri=1185","nofollow":true},{"id":35287,"name":"DISTRIBUTION","url":"https://www.academia.edu/Documents/in/DISTRIBUTION?f_ri=1185","nofollow":true},{"id":36022,"name":"Tomography","url":"https://www.academia.edu/Documents/in/Tomography?f_ri=1185"},{"id":53292,"name":"Programming","url":"https://www.academia.edu/Documents/in/Programming?f_ri=1185"},{"id":83087,"name":"Isotopes","url":"https://www.academia.edu/Documents/in/Isotopes?f_ri=1185"},{"id":92288,"name":"Beta decay","url":"https://www.academia.edu/Documents/in/Beta_decay?f_ri=1185"},{"id":104336,"name":"Data acquisition","url":"https://www.academia.edu/Documents/in/Data_acquisition?f_ri=1185"},{"id":227567,"name":"Decay","url":"https://www.academia.edu/Documents/in/Decay?f_ri=1185"},{"id":637974,"name":"Distribution","url":"https://www.academia.edu/Documents/in/Distribution-3?f_ri=1185"},{"id":967378,"name":"Radioisotopes","url":"https://www.academia.edu/Documents/in/Radioisotopes?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_10826863" data-work_id="10826863" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/10826863/Multi_column_Deep_Neural_Networks_for_Image_Classification">Multi-column Deep Neural Networks for Image Classification</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Traditional methods of computer vision and machine learning cannot match human performance on tasks such as the recognition of handwritten digits or traffic signs. Our biologically plausible, wide and deep artificial neural network... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_10826863" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Traditional methods of computer vision and machine learning cannot match human performance on tasks such as the recognition of handwritten digits or traffic signs. Our biologically plausible, wide and deep artificial neural network architectures can. Small (often minimal) receptive fields of convolutional winner-take-all neurons yield large network depth, resulting in roughly as many sparsely connected neural layers as found in mammals between retina and visual cortex. Only winner neurons are trained. Several deep neural columns become experts on inputs preprocessed in different ways; their predictions are averaged. Graphics cards allow for fast training. On the very competitive MNIST handwriting benchmark, our method is the first to achieve near-human performance. On a traffic sign recognition benchmark it outperforms humans by a factor of two. We also improve the state-of-the-art on a plethora of common image classification benchmarks.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/10826863" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="2c4f7352f1cbe076f3f959e1d8a549e0" rel="nofollow" data-download="{&quot;attachment_id&quot;:36650987,&quot;asset_id&quot;:10826863,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/36650987/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="26320445" href="https://independent.academia.edu/AdewaleAdelabu">Samuel A Adelabu</a><script data-card-contents-for-user="26320445" type="text/json">{"id":26320445,"first_name":"Samuel","last_name":"Adelabu","domain_name":"independent","page_name":"AdewaleAdelabu","display_name":"Samuel A Adelabu","profile_url":"https://independent.academia.edu/AdewaleAdelabu?f_ri=1185","photo":"https://0.academia-photos.com/26320445/7271241/8181735/s65_adewale.adelabu.jpg"}</script></span></span></li><li class="js-paper-rank-work_10826863 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="10826863"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 10826863, container: ".js-paper-rank-work_10826863", }); });</script></li><li class="js-percentile-work_10826863 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 10826863; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_10826863"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_10826863 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="10826863"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 10826863; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=10826863]").text(description); $(".js-view-count-work_10826863").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_10826863").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="10826863"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">2</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a><script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=10826863]'), work: {"id":10826863,"title":"Multi-column Deep Neural Networks for Image Classification","created_at":"2015-02-15T22:40:28.840-08:00","url":"https://www.academia.edu/10826863/Multi_column_Deep_Neural_Networks_for_Image_Classification?f_ri=1185","dom_id":"work_10826863","summary":"Traditional methods of computer vision and machine learning cannot match human performance on tasks such as the recognition of handwritten digits or traffic signs. Our biologically plausible, wide and deep artificial neural network architectures can. Small (often minimal) receptive fields of convolutional winner-take-all neurons yield large network depth, resulting in roughly as many sparsely connected neural layers as found in mammals between retina and visual cortex. Only winner neurons are trained. Several deep neural columns become experts on inputs preprocessed in different ways; their predictions are averaged. Graphics cards allow for fast training. On the very competitive MNIST handwriting benchmark, our method is the first to achieve near-human performance. On a traffic sign recognition benchmark it outperforms humans by a factor of two. We also improve the state-of-the-art on a plethora of common image classification benchmarks.","downloadable_attachments":[{"id":36650987,"asset_id":10826863,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":26320445,"first_name":"Samuel","last_name":"Adelabu","domain_name":"independent","page_name":"AdewaleAdelabu","display_name":"Samuel A Adelabu","profile_url":"https://independent.academia.edu/AdewaleAdelabu?f_ri=1185","photo":"https://0.academia-photos.com/26320445/7271241/8181735/s65_adewale.adelabu.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_7665271 coauthored" data-work_id="7665271" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/7665271/On_line_tool_wear_monitoring_using_geometric_descriptors_from_digital_images">On-line tool wear monitoring using geometric descriptors from digital images</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">A new method based on a computer vision and statistical learning system is proposed to estimate the wear level in cutting inserts and to identify the time for its replacement. AISI SAE 1045 and 4140 steel bars of 250 mm of length and 90... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_7665271" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">A new method based on a computer vision and statistical learning system is proposed to estimate the wear level in cutting inserts and to identify the time for its replacement. AISI SAE 1045 and 4140 steel bars of 250 mm of length and 90 mm of diameter were machined using a CNC parallel lathe. The image acquisition system comprised a Pulnix PE2015 B/W camera; a 70XL industrial zoom, with an extension tube of 1X; several lenses, a DCR®III regulated light source and a diffuse lighting system. The images were captured by a Matrox Meteor II card and pre-processed and segmented with Matlab. For each wear region, a set of 9 geometrical descriptors was obtained. The cluster analysis revealed the presence of three distinct categories that corresponded to low, medium and high wear levels. The effectiveness of the classification was verified by means of a LDA class reconstruction that reported a Fowlkes-Mallows index of 0.8571. The LDA likelihood estimates of the wear region provide a useful tool insert replacement criterion. ABSTRACT A new method based on a computer vision and statistical learning system is proposed to estimate the wear level in cutting inserts and to identify the time for its replacement. AISI SAE 1045 and 4140 steel bars of 250 mm of length and 90 mm of diameter were machined using a CNC parallel lathe. The image acquisition system comprised a Pulnix PE2015 B/W camera; a 70XL industrial zoom, with an extension tube of 1X; several lenses, a DCR®III regulated light source and a diffuse lighting system. The images were captured by a Matrox Meteor II card and pre-processed and segmented with Matlab. For each wear region, a set of 9 geometrical descriptors was obtained. The cluster analysis revealed the presence of three distinct categories that corresponded to low, medium and high wear levels. The effectiveness of the classification was verified by means of a LDA class reconstruction that reported a Fowlkes-Mallows index of 0.8571. The LDA likelihood estimates of the wear region provide a useful tool insert replacement criterion.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/7665271" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="97f112168508eb44f057482470e404e7" rel="nofollow" data-download="{&quot;attachment_id&quot;:48378733,&quot;asset_id&quot;:7665271,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/48378733/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="13916453" href="https://universidaddeleon.academia.edu/EnriqueAlegre">Enrique Alegre</a><script data-card-contents-for-user="13916453" type="text/json">{"id":13916453,"first_name":"Enrique","last_name":"Alegre","domain_name":"universidaddeleon","page_name":"EnriqueAlegre","display_name":"Enrique Alegre","profile_url":"https://universidaddeleon.academia.edu/EnriqueAlegre?f_ri=1185","photo":"https://0.academia-photos.com/13916453/12772117/14199478/s65_enrique.alegre.jpg"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-7665271">+1</span><div class="hidden js-additional-users-7665271"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://idl.academia.edu/ManuelLimas">Manuel Limas</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-7665271'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-7665271').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_7665271 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="7665271"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 7665271, container: ".js-paper-rank-work_7665271", }); });</script></li><li class="js-percentile-work_7665271 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 7665271; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_7665271"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_7665271 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="7665271"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 7665271; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=7665271]").text(description); $(".js-view-count-work_7665271").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_7665271").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="7665271"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">16</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5187" rel="nofollow" href="https://www.academia.edu/Documents/in/Statistical_Analysis">Statistical Analysis</a>,&nbsp;<script data-card-contents-for-ri="5187" type="text/json">{"id":5187,"name":"Statistical Analysis","url":"https://www.academia.edu/Documents/in/Statistical_Analysis?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="6177" rel="nofollow" href="https://www.academia.edu/Documents/in/Modeling">Modeling</a><script data-card-contents-for-ri="6177" type="text/json">{"id":6177,"name":"Modeling","url":"https://www.academia.edu/Documents/in/Modeling?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=7665271]'), work: {"id":7665271,"title":"On-line tool wear monitoring using geometric descriptors from digital images","created_at":"2014-07-14T17:38:31.371-07:00","url":"https://www.academia.edu/7665271/On_line_tool_wear_monitoring_using_geometric_descriptors_from_digital_images?f_ri=1185","dom_id":"work_7665271","summary":"A new method based on a computer vision and statistical learning system is proposed to estimate the wear level in cutting inserts and to identify the time for its replacement. AISI SAE 1045 and 4140 steel bars of 250 mm of length and 90 mm of diameter were machined using a CNC parallel lathe. The image acquisition system comprised a Pulnix PE2015 B/W camera; a 70XL industrial zoom, with an extension tube of 1X; several lenses, a DCR®III regulated light source and a diffuse lighting system. The images were captured by a Matrox Meteor II card and pre-processed and segmented with Matlab. For each wear region, a set of 9 geometrical descriptors was obtained. The cluster analysis revealed the presence of three distinct categories that corresponded to low, medium and high wear levels. The effectiveness of the classification was verified by means of a LDA class reconstruction that reported a Fowlkes-Mallows index of 0.8571. The LDA likelihood estimates of the wear region provide a useful tool insert replacement criterion. ABSTRACT A new method based on a computer vision and statistical learning system is proposed to estimate the wear level in cutting inserts and to identify the time for its replacement. AISI SAE 1045 and 4140 steel bars of 250 mm of length and 90 mm of diameter were machined using a CNC parallel lathe. The image acquisition system comprised a Pulnix PE2015 B/W camera; a 70XL industrial zoom, with an extension tube of 1X; several lenses, a DCR®III regulated light source and a diffuse lighting system. The images were captured by a Matrox Meteor II card and pre-processed and segmented with Matlab. For each wear region, a set of 9 geometrical descriptors was obtained. The cluster analysis revealed the presence of three distinct categories that corresponded to low, medium and high wear levels. The effectiveness of the classification was verified by means of a LDA class reconstruction that reported a Fowlkes-Mallows index of 0.8571. The LDA likelihood estimates of the wear region provide a useful tool insert replacement criterion.","downloadable_attachments":[{"id":48378733,"asset_id":7665271,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":13916453,"first_name":"Enrique","last_name":"Alegre","domain_name":"universidaddeleon","page_name":"EnriqueAlegre","display_name":"Enrique Alegre","profile_url":"https://universidaddeleon.academia.edu/EnriqueAlegre?f_ri=1185","photo":"https://0.academia-photos.com/13916453/12772117/14199478/s65_enrique.alegre.jpg"},{"id":31522381,"first_name":"Manuel","last_name":"Limas","domain_name":"idl","page_name":"ManuelLimas","display_name":"Manuel Limas","profile_url":"https://idl.academia.edu/ManuelLimas?f_ri=1185","photo":"https://gravatar.com/avatar/9782ca72ba7c9b3cd207d5a0e49e4912?s=65"}],"research_interests":[{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":5187,"name":"Statistical Analysis","url":"https://www.academia.edu/Documents/in/Statistical_Analysis?f_ri=1185","nofollow":true},{"id":6177,"name":"Modeling","url":"https://www.academia.edu/Documents/in/Modeling?f_ri=1185","nofollow":true},{"id":38246,"name":"Statistical Learning","url":"https://www.academia.edu/Documents/in/Statistical_Learning?f_ri=1185"},{"id":56368,"name":"Image Classification","url":"https://www.academia.edu/Documents/in/Image_Classification?f_ri=1185"},{"id":63360,"name":"Discriminant Analysis","url":"https://www.academia.edu/Documents/in/Discriminant_Analysis?f_ri=1185"},{"id":81327,"name":"Machine Tools","url":"https://www.academia.edu/Documents/in/Machine_Tools?f_ri=1185"},{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=1185"},{"id":96825,"name":"Manufacturing Engineering","url":"https://www.academia.edu/Documents/in/Manufacturing_Engineering?f_ri=1185"},{"id":102115,"name":"Tool wear","url":"https://www.academia.edu/Documents/in/Tool_wear?f_ri=1185"},{"id":106145,"name":"Classification","url":"https://www.academia.edu/Documents/in/Classification?f_ri=1185"},{"id":142963,"name":"Eccentricity","url":"https://www.academia.edu/Documents/in/Eccentricity?f_ri=1185"},{"id":254626,"name":"Cluster","url":"https://www.academia.edu/Documents/in/Cluster?f_ri=1185"},{"id":907382,"name":"Digital Image","url":"https://www.academia.edu/Documents/in/Digital_Image?f_ri=1185"},{"id":2504955,"name":"Binary Image","url":"https://www.academia.edu/Documents/in/Binary_Image?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_30690519" data-work_id="30690519" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/30690519/Computer_vision_algorithms_on_reconfigurable_logic_arrays">Computer vision algorithms on reconfigurable logic arrays</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Computer vision algorithms are natural candidates for high performance computing systems. Algorithms in computer vision are characterized by complex and repetitive operations on large amounts of data involving a variety of data... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_30690519" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Computer vision algorithms are natural candidates for high performance computing systems. Algorithms in computer vision are characterized by complex and repetitive operations on large amounts of data involving a variety of data interactions (e.g., point operations, neighborhood operations, global operations). In this paper, we describe the use of the custom computing approach to meet the computation and communication needs of computer vision algorithms. By customizing hardware architecture at the instruction level for every application, the optimal grain size needed for the problem at hand and the instruction granularity can be matched. A custom computing approach can also reuse the same hardware by reconfiguring at the software level for different levels of the computer vision application. We demonstrate the advantages of our approach using Splash 2-a Xilinx 4010-based custom computer.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/30690519" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="c85c018a4959c5b98177034eac1ae7ea" rel="nofollow" data-download="{&quot;attachment_id&quot;:51133000,&quot;asset_id&quot;:30690519,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/51133000/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="58444236" href="https://independent.academia.edu/AnilJain50">Anil Jain</a><script data-card-contents-for-user="58444236" type="text/json">{"id":58444236,"first_name":"Anil","last_name":"Jain","domain_name":"independent","page_name":"AnilJain50","display_name":"Anil Jain","profile_url":"https://independent.academia.edu/AnilJain50?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_30690519 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="30690519"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 30690519, container: ".js-paper-rank-work_30690519", }); });</script></li><li class="js-percentile-work_30690519 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 30690519; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_30690519"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_30690519 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="30690519"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 30690519; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=30690519]").text(description); $(".js-view-count-work_30690519").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_30690519").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="30690519"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">18</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="433" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Architecture">Computer Architecture</a>,&nbsp;<script data-card-contents-for-ri="433" type="text/json">{"id":433,"name":"Computer Architecture","url":"https://www.academia.edu/Documents/in/Computer_Architecture?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="440" rel="nofollow" href="https://www.academia.edu/Documents/in/Distributed_Computing">Distributed Computing</a>,&nbsp;<script data-card-contents-for-ri="440" type="text/json">{"id":440,"name":"Distributed Computing","url":"https://www.academia.edu/Documents/in/Distributed_Computing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=30690519]'), work: {"id":30690519,"title":"Computer vision algorithms on reconfigurable logic arrays","created_at":"2016-12-31T09:32:48.715-08:00","url":"https://www.academia.edu/30690519/Computer_vision_algorithms_on_reconfigurable_logic_arrays?f_ri=1185","dom_id":"work_30690519","summary":"Computer vision algorithms are natural candidates for high performance computing systems. Algorithms in computer vision are characterized by complex and repetitive operations on large amounts of data involving a variety of data interactions (e.g., point operations, neighborhood operations, global operations). In this paper, we describe the use of the custom computing approach to meet the computation and communication needs of computer vision algorithms. By customizing hardware architecture at the instruction level for every application, the optimal grain size needed for the problem at hand and the instruction granularity can be matched. A custom computing approach can also reuse the same hardware by reconfiguring at the software level for different levels of the computer vision application. We demonstrate the advantages of our approach using Splash 2-a Xilinx 4010-based custom computer.","downloadable_attachments":[{"id":51133000,"asset_id":30690519,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":58444236,"first_name":"Anil","last_name":"Jain","domain_name":"independent","page_name":"AnilJain50","display_name":"Anil Jain","profile_url":"https://independent.academia.edu/AnilJain50?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":433,"name":"Computer Architecture","url":"https://www.academia.edu/Documents/in/Computer_Architecture?f_ri=1185","nofollow":true},{"id":440,"name":"Distributed Computing","url":"https://www.academia.edu/Documents/in/Distributed_Computing?f_ri=1185","nofollow":true},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1326,"name":"Reconfigurable Computing","url":"https://www.academia.edu/Documents/in/Reconfigurable_Computing?f_ri=1185"},{"id":11598,"name":"Neural Networks","url":"https://www.academia.edu/Documents/in/Neural_Networks?f_ri=1185"},{"id":14417,"name":"Machine Vision","url":"https://www.academia.edu/Documents/in/Machine_Vision?f_ri=1185"},{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185"},{"id":64561,"name":"Computer Software","url":"https://www.academia.edu/Documents/in/Computer_Software?f_ri=1185"},{"id":91936,"name":"Hardware","url":"https://www.academia.edu/Documents/in/Hardware?f_ri=1185"},{"id":186099,"name":"Grain size","url":"https://www.academia.edu/Documents/in/Grain_size?f_ri=1185"},{"id":189056,"name":"Surface Reconstruction","url":"https://www.academia.edu/Documents/in/Surface_Reconstruction?f_ri=1185"},{"id":302692,"name":"Layout","url":"https://www.academia.edu/Documents/in/Layout?f_ri=1185"},{"id":850706,"name":"High performance computer","url":"https://www.academia.edu/Documents/in/High_performance_computer?f_ri=1185"},{"id":1707689,"name":"Pixel","url":"https://www.academia.edu/Documents/in/Pixel?f_ri=1185"},{"id":1809959,"name":"Hardware architecture","url":"https://www.academia.edu/Documents/in/Hardware_architecture?f_ri=1185"},{"id":1855177,"name":"RECONFIGURABLE LOGIC","url":"https://www.academia.edu/Documents/in/RECONFIGURABLE_LOGIC?f_ri=1185"},{"id":1931321,"name":"Application Software","url":"https://www.academia.edu/Documents/in/Application_Software?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_6791717" data-work_id="6791717" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/6791717/Histogram_based_Perceptual_Quality_Assessment_Method_for_Color_Images">Histogram based Perceptual Quality Assessment Method for Color Images</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">A histogram based perceptual quality assessment (HPQA) method for color images is presented in this paper. Basically, the HPQA combines two quality assessment approaches (color image quality assessment and histogram based image quality... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_6791717" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">A histogram based perceptual quality assessment (HPQA) method for color images is presented in this paper. Basically, the HPQA combines two quality assessment approaches (color image quality assessment and histogram based image quality assessment) and it uses the fourier transform. Its range is between 0 and 1. 1 represents the best quality result, 0 represents the worst quality result for the HPQA. The HPQA results are more suitable than its counterparts in terms of the HVS and they can be obtained faster than the other methods&#39; results. In addition, it can easily differentiate effects of low distortions on color images.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/6791717" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="e80adec340671b527ea18104a6aec7d1" rel="nofollow" data-download="{&quot;attachment_id&quot;:33608713,&quot;asset_id&quot;:6791717,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/33608713/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="432645" href="https://independent.academia.edu/YILDIRAYYALMAN">YILDIRAY YALMAN</a><script data-card-contents-for-user="432645" type="text/json">{"id":432645,"first_name":"YILDIRAY","last_name":"YALMAN","domain_name":"independent","page_name":"YILDIRAYYALMAN","display_name":"YILDIRAY YALMAN","profile_url":"https://independent.academia.edu/YILDIRAYYALMAN?f_ri=1185","photo":"https://0.academia-photos.com/432645/106985/126711/s65_yildiray.yalman.jpg"}</script></span></span></li><li class="js-paper-rank-work_6791717 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="6791717"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 6791717, container: ".js-paper-rank-work_6791717", }); });</script></li><li class="js-percentile-work_6791717 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 6791717; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_6791717"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_6791717 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="6791717"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 6791717; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=6791717]").text(description); $(".js-view-count-work_6791717").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_6791717").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="6791717"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">2</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="222842" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Quality_Measures">Image Quality Measures</a><script data-card-contents-for-ri="222842" type="text/json">{"id":222842,"name":"Image Quality Measures","url":"https://www.academia.edu/Documents/in/Image_Quality_Measures?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=6791717]'), work: {"id":6791717,"title":"Histogram based Perceptual Quality Assessment Method for Color Images","created_at":"2014-04-17T17:30:15.449-07:00","url":"https://www.academia.edu/6791717/Histogram_based_Perceptual_Quality_Assessment_Method_for_Color_Images?f_ri=1185","dom_id":"work_6791717","summary":"A histogram based perceptual quality assessment (HPQA) method for color images is presented in this paper. Basically, the HPQA combines two quality assessment approaches (color image quality assessment and histogram based image quality assessment) and it uses the fourier transform. Its range is between 0 and 1. 1 represents the best quality result, 0 represents the worst quality result for the HPQA. The HPQA results are more suitable than its counterparts in terms of the HVS and they can be obtained faster than the other methods' results. In addition, it can easily differentiate effects of low distortions on color images.","downloadable_attachments":[{"id":33608713,"asset_id":6791717,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":432645,"first_name":"YILDIRAY","last_name":"YALMAN","domain_name":"independent","page_name":"YILDIRAYYALMAN","display_name":"YILDIRAY YALMAN","profile_url":"https://independent.academia.edu/YILDIRAYYALMAN?f_ri=1185","photo":"https://0.academia-photos.com/432645/106985/126711/s65_yildiray.yalman.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":222842,"name":"Image Quality Measures","url":"https://www.academia.edu/Documents/in/Image_Quality_Measures?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_29265307" data-work_id="29265307" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/29265307/Kernel_Multivariate_Analysis_Framework_for_Supervised_Subspace_Learning_A_Tutorial_on_Linear_and_Kernel_Multivariate_Methods">Kernel Multivariate Analysis Framework for Supervised Subspace Learning: A Tutorial on Linear and Kernel Multivariate Methods</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Feature extraction and dimensionality reduction are important tasks in many fields of science dealing with signal processing and analysis. The relevance of these techniques is increasing as current sensory devices are developed with ever... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_29265307" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Feature extraction and dimensionality reduction are important tasks in many fields of science dealing with signal processing and analysis. The relevance of these techniques is increasing as current sensory devices are developed with ever higher resolution, and problems involving multimodal data sources become more common. A plethora of feature extraction methods are available in the literature collectively grouped under the field of Multivariate Analysis (MVA). This paper provides a uniform treatment of several methods: Principal Component Analysis (PCA), Partial Least Squares (PLS), Canonical Correlation Analysis (CCA) and Orthonormalized PLS (OPLS), as well as their non-linear extensions derived by means of the theory of reproducing kernel Hilbert spaces. We also review their connections to other methods for classification and statistical dependence estimation, and introduce some recent developments to deal with the extreme cases of large-scale and low-sized problems. To illustrate the wide applicability of these methods in both classification and regression problems, we analyze their performance in a benchmark of publicly available data sets, and pay special attention to specific real applications involving audio processing for music genre prediction and hyperspectral satellite images for Earth and climate monitoring.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/29265307" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="e7d8a20712e2af2c17fee7e74a082dfb" rel="nofollow" data-download="{&quot;attachment_id&quot;:49714609,&quot;asset_id&quot;:29265307,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/49714609/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="55270275" href="https://independent.academia.edu/JeronimoArenasGarcia">Jeronimo Arenas-Garcia</a><script data-card-contents-for-user="55270275" type="text/json">{"id":55270275,"first_name":"Jeronimo","last_name":"Arenas-Garcia","domain_name":"independent","page_name":"JeronimoArenasGarcia","display_name":"Jeronimo Arenas-Garcia","profile_url":"https://independent.academia.edu/JeronimoArenasGarcia?f_ri=1185","photo":"https://0.academia-photos.com/55270275/160772968/150480684/s65_jeronimo.arenas-garcia.jpeg"}</script></span></span></li><li class="js-paper-rank-work_29265307 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="29265307"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 29265307, container: ".js-paper-rank-work_29265307", }); });</script></li><li class="js-percentile-work_29265307 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 29265307; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_29265307"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_29265307 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="29265307"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 29265307; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=29265307]").text(description); $(".js-view-count-work_29265307").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_29265307").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="29265307"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">21</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="60" rel="nofollow" href="https://www.academia.edu/Documents/in/Mechanical_Engineering">Mechanical Engineering</a>,&nbsp;<script data-card-contents-for-ri="60" type="text/json">{"id":60,"name":"Mechanical Engineering","url":"https://www.academia.edu/Documents/in/Mechanical_Engineering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1252" rel="nofollow" href="https://www.academia.edu/Documents/in/Remote_Sensing">Remote Sensing</a>,&nbsp;<script data-card-contents-for-ri="1252" type="text/json">{"id":1252,"name":"Remote Sensing","url":"https://www.academia.edu/Documents/in/Remote_Sensing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a><script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=29265307]'), work: {"id":29265307,"title":"Kernel Multivariate Analysis Framework for Supervised Subspace Learning: A Tutorial on Linear and Kernel Multivariate Methods","created_at":"2016-10-19T06:17:34.518-07:00","url":"https://www.academia.edu/29265307/Kernel_Multivariate_Analysis_Framework_for_Supervised_Subspace_Learning_A_Tutorial_on_Linear_and_Kernel_Multivariate_Methods?f_ri=1185","dom_id":"work_29265307","summary":"Feature extraction and dimensionality reduction are important tasks in many fields of science dealing with signal processing and analysis. The relevance of these techniques is increasing as current sensory devices are developed with ever higher resolution, and problems involving multimodal data sources become more common. A plethora of feature extraction methods are available in the literature collectively grouped under the field of Multivariate Analysis (MVA). This paper provides a uniform treatment of several methods: Principal Component Analysis (PCA), Partial Least Squares (PLS), Canonical Correlation Analysis (CCA) and Orthonormalized PLS (OPLS), as well as their non-linear extensions derived by means of the theory of reproducing kernel Hilbert spaces. We also review their connections to other methods for classification and statistical dependence estimation, and introduce some recent developments to deal with the extreme cases of large-scale and low-sized problems. To illustrate the wide applicability of these methods in both classification and regression problems, we analyze their performance in a benchmark of publicly available data sets, and pay special attention to specific real applications involving audio processing for music genre prediction and hyperspectral satellite images for Earth and climate monitoring.","downloadable_attachments":[{"id":49714609,"asset_id":29265307,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":55270275,"first_name":"Jeronimo","last_name":"Arenas-Garcia","domain_name":"independent","page_name":"JeronimoArenasGarcia","display_name":"Jeronimo Arenas-Garcia","profile_url":"https://independent.academia.edu/JeronimoArenasGarcia?f_ri=1185","photo":"https://0.academia-photos.com/55270275/160772968/150480684/s65_jeronimo.arenas-garcia.jpeg"}],"research_interests":[{"id":60,"name":"Mechanical Engineering","url":"https://www.academia.edu/Documents/in/Mechanical_Engineering?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1252,"name":"Remote Sensing","url":"https://www.academia.edu/Documents/in/Remote_Sensing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true},{"id":2141,"name":"Signal Processing","url":"https://www.academia.edu/Documents/in/Signal_Processing?f_ri=1185"},{"id":4148,"name":"Audio Signal Processing","url":"https://www.academia.edu/Documents/in/Audio_Signal_Processing?f_ri=1185"},{"id":5069,"name":"Principal Component Analysis","url":"https://www.academia.edu/Documents/in/Principal_Component_Analysis?f_ri=1185"},{"id":6020,"name":"Earth","url":"https://www.academia.edu/Documents/in/Earth?f_ri=1185"},{"id":15661,"name":"Audio Processing","url":"https://www.academia.edu/Documents/in/Audio_Processing?f_ri=1185"},{"id":47555,"name":"Dimensionality Reduction","url":"https://www.academia.edu/Documents/in/Dimensionality_Reduction?f_ri=1185"},{"id":106213,"name":"Partial Least Squares","url":"https://www.academia.edu/Documents/in/Partial_Least_Squares?f_ri=1185"},{"id":123230,"name":"Regression Analysis","url":"https://www.academia.edu/Documents/in/Regression_Analysis?f_ri=1185"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction?f_ri=1185"},{"id":174775,"name":"Pca","url":"https://www.academia.edu/Documents/in/Pca?f_ri=1185"},{"id":285356,"name":"Hyperspectral Imaging","url":"https://www.academia.edu/Documents/in/Hyperspectral_Imaging?f_ri=1185"},{"id":342317,"name":"OPLS","url":"https://www.academia.edu/Documents/in/OPLS?f_ri=1185"},{"id":423536,"name":"Canonical Correlation Analysis","url":"https://www.academia.edu/Documents/in/Canonical_Correlation_Analysis?f_ri=1185"},{"id":884835,"name":"Reproducing Kernel Hilbert Space","url":"https://www.academia.edu/Documents/in/Reproducing_Kernel_Hilbert_Space?f_ri=1185"},{"id":988387,"name":"Hilbert Space","url":"https://www.academia.edu/Documents/in/Hilbert_Space?f_ri=1185"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering?f_ri=1185"},{"id":1970623,"name":"RKHS","url":"https://www.academia.edu/Documents/in/RKHS?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_9208536" data-work_id="9208536" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/9208536/MPEG_4_Low_Delay_Design_for_HDTV_with_Multi_stream_Approach">MPEG-4 Low Delay Design for HDTV with Multi-stream Approach</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The different MPEG standards are well known for their media compression capabilities. MPEG-4 evolved in effort by JVT to develop a codec that maximize quality and minimize bit rates. MPEG-4 Version2 Simple Profile which we choose contain... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_9208536" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The different MPEG standards are well known for their media compression capabilities. MPEG-4 evolved in effort by JVT to develop a codec that maximize quality and minimize bit rates. MPEG-4 Version2 Simple Profile which we choose contain several novel tools for the low-bitrate coding of audio and video. However, in typical conditions, the latency needed to compress/decompress a digital video signal at Standard Definition (SD) resolution is of the order of 15 frames, which corresponds to 0.5 sec. This delay would be more in case of HDTV because of its high resolution and bigger video size.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/9208536" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="879abdf2a3b452947ba7b16551f34983" rel="nofollow" data-download="{&quot;attachment_id&quot;:35487209,&quot;asset_id&quot;:9208536,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/35487209/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="21097383" href="https://independent.academia.edu/B%C3%ACnhTr%E1%BA%A7nV%C4%83n">Bình Trần Văn</a><script data-card-contents-for-user="21097383" type="text/json">{"id":21097383,"first_name":"Bình","last_name":"Trần Văn","domain_name":"independent","page_name":"BìnhTrầnVăn","display_name":"Bình Trần Văn","profile_url":"https://independent.academia.edu/B%C3%ACnhTr%E1%BA%A7nV%C4%83n?f_ri=1185","photo":"https://0.academia-photos.com/21097383/5820146/6613220/s65_b_nh.tr_n_v_n.jpg_oh_348003712c31f3befd0d3469b3d68ffe_oe_54e03289___gda___1428085734_a4e119773757903c01d4a80637189cc9"}</script></span></span></li><li class="js-paper-rank-work_9208536 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="9208536"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 9208536, container: ".js-paper-rank-work_9208536", }); });</script></li><li class="js-percentile-work_9208536 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 9208536; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_9208536"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_9208536 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="9208536"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 9208536; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=9208536]").text(description); $(".js-view-count-work_9208536").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_9208536").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="9208536"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">2</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2345" rel="nofollow" href="https://www.academia.edu/Documents/in/Wireless_Communications">Wireless Communications</a><script data-card-contents-for-ri="2345" type="text/json">{"id":2345,"name":"Wireless Communications","url":"https://www.academia.edu/Documents/in/Wireless_Communications?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=9208536]'), work: {"id":9208536,"title":"MPEG-4 Low Delay Design for HDTV with Multi-stream Approach","created_at":"2014-11-09T12:10:52.346-08:00","url":"https://www.academia.edu/9208536/MPEG_4_Low_Delay_Design_for_HDTV_with_Multi_stream_Approach?f_ri=1185","dom_id":"work_9208536","summary":"The different MPEG standards are well known for their media compression capabilities. MPEG-4 evolved in effort by JVT to develop a codec that maximize quality and minimize bit rates. MPEG-4 Version2 Simple Profile which we choose contain several novel tools for the low-bitrate coding of audio and video. However, in typical conditions, the latency needed to compress/decompress a digital video signal at Standard Definition (SD) resolution is of the order of 15 frames, which corresponds to 0.5 sec. This delay would be more in case of HDTV because of its high resolution and bigger video size.","downloadable_attachments":[{"id":35487209,"asset_id":9208536,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":21097383,"first_name":"Bình","last_name":"Trần Văn","domain_name":"independent","page_name":"BìnhTrầnVăn","display_name":"Bình Trần Văn","profile_url":"https://independent.academia.edu/B%C3%ACnhTr%E1%BA%A7nV%C4%83n?f_ri=1185","photo":"https://0.academia-photos.com/21097383/5820146/6613220/s65_b_nh.tr_n_v_n.jpg_oh_348003712c31f3befd0d3469b3d68ffe_oe_54e03289___gda___1428085734_a4e119773757903c01d4a80637189cc9"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2345,"name":"Wireless Communications","url":"https://www.academia.edu/Documents/in/Wireless_Communications?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_7548105" data-work_id="7548105" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/7548105/Face_Recognition_Based_Door_Lock_System_Using_Opencv_and_C_with_Remote_Access_and_Security_Features">Face Recognition Based Door Lock System Using Opencv and C# with Remote Access and Security Features</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">This paper investigates the accuracy and effectiveness of the face detection and recognition algorithms using OpenCV and C# computer language. The adaboost algorithm [2] is used for face detection and PCA algorithm[1] is used for face... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_7548105" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">This paper investigates the accuracy and effectiveness of the face detection and recognition algorithms using OpenCV and C# computer language. The adaboost algorithm [2] is used for face detection and PCA algorithm[1] is used for face recognition. This paper also investigates the robustness of the face recognition system when an unknown person is being detected, wherein the system will send an email to the owner of the system using SMTP [7]. The door lock can also be accessed remotely from any part of the world by using a Dropbox [8] account.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/7548105" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="6a2dcaf37b52356c4daa32e7ee48db3d" rel="nofollow" data-download="{&quot;attachment_id&quot;:34110803,&quot;asset_id&quot;:7548105,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/34110803/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="656912" href="https://ijera.academia.edu/ijera">IJERA Journal</a><script data-card-contents-for-user="656912" type="text/json">{"id":656912,"first_name":"IJERA","last_name":"Journal","domain_name":"ijera","page_name":"ijera","display_name":"IJERA Journal","profile_url":"https://ijera.academia.edu/ijera?f_ri=1185","photo":"https://0.academia-photos.com/656912/1164437/1458041/s65_ijera._www.ijera.com_.jpg"}</script></span></span></li><li class="js-paper-rank-work_7548105 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="7548105"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 7548105, container: ".js-paper-rank-work_7548105", }); });</script></li><li class="js-percentile-work_7548105 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 7548105; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_7548105"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_7548105 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="7548105"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 7548105; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=7548105]").text(description); $(".js-view-count-work_7548105").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_7548105").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="7548105"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">6</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="7943" rel="nofollow" href="https://www.academia.edu/Documents/in/Recognition">Recognition</a>,&nbsp;<script data-card-contents-for-ri="7943" type="text/json">{"id":7943,"name":"Recognition","url":"https://www.academia.edu/Documents/in/Recognition?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="151088" rel="nofollow" href="https://www.academia.edu/Documents/in/Detection">Detection</a><script data-card-contents-for-ri="151088" type="text/json">{"id":151088,"name":"Detection","url":"https://www.academia.edu/Documents/in/Detection?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=7548105]'), work: {"id":7548105,"title":"Face Recognition Based Door Lock System Using Opencv and C# with Remote Access and Security Features","created_at":"2014-07-03T14:13:24.728-07:00","url":"https://www.academia.edu/7548105/Face_Recognition_Based_Door_Lock_System_Using_Opencv_and_C_with_Remote_Access_and_Security_Features?f_ri=1185","dom_id":"work_7548105","summary":"This paper investigates the accuracy and effectiveness of the face detection and recognition algorithms using OpenCV and C# computer language. The adaboost algorithm [2] is used for face detection and PCA algorithm[1] is used for face recognition. This paper also investigates the robustness of the face recognition system when an unknown person is being detected, wherein the system will send an email to the owner of the system using SMTP [7]. The door lock can also be accessed remotely from any part of the world by using a Dropbox [8] account.","downloadable_attachments":[{"id":34110803,"asset_id":7548105,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":656912,"first_name":"IJERA","last_name":"Journal","domain_name":"ijera","page_name":"ijera","display_name":"IJERA Journal","profile_url":"https://ijera.academia.edu/ijera?f_ri=1185","photo":"https://0.academia-photos.com/656912/1164437/1458041/s65_ijera._www.ijera.com_.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true},{"id":7943,"name":"Recognition","url":"https://www.academia.edu/Documents/in/Recognition?f_ri=1185","nofollow":true},{"id":151088,"name":"Detection","url":"https://www.academia.edu/Documents/in/Detection?f_ri=1185","nofollow":true},{"id":311120,"name":"OpenCV","url":"https://www.academia.edu/Documents/in/OpenCV?f_ri=1185"},{"id":1188997,"name":"C","url":"https://www.academia.edu/Documents/in/C?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_34629401" data-work_id="34629401" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/34629401/Chapter_7_Coordinate_metrology">Chapter 7 Coordinate metrology</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Coordinate metrology is a field of metrology that is becoming increasingly popular in the manufacturing industry. Coordinate metrology enables the three-dimensional measurement to be carried out on complex object in a single setup. The... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_34629401" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Coordinate metrology is a field of metrology that is becoming increasingly popular in the manufacturing industry. Coordinate metrology enables the three-dimensional measurement to be carried out on complex object in a single setup. The instrument used for this purpose is known as the coordinate measuring machine or CMM. In general, the CMM comprises three frames that move along three orthogonal axes, i.e. X-, Y-and Z-axis. Usually, a contact device known as measuring probe is attached to the end of the Z-axis. The displacement along each axis is measured by a linear measurement system and the readings are sent to an electronic controller. The electronic controller is connected to a computer that also enables various types of data processing to be performed. Repeated measurements on similar objects can be done easily by programming the motion of the axes of the machine. This reduces the time taken for measurement and inspection up to 80% to 90%. These machines are made in various sizes and the methods of operation are based on either manual or computer-aided. 7.2. Types of CMM The basic CMM consists of three axes, each provided with a guide way that enable precise movement along a straight line. Each guide way has a carrier that moves along. The carrier enables the second carrier to move along a straight line based on the first guide way. Each axis is fitted with a precision scale that records the position of the carrier measured from a reference point. The measuring probe is fitted to the carrier on the third axis. When the measuring probe touches the object being measured, the measurement system records the position of all three axes. There are several physical configurations of CMMs. All the configurations have a method of moving the probe along three axes relative to the object. Although there are many designs of CMMs but they can be grouped into five basic types as follows: (a) Cantilever type, (b) Bridge type, (c) Horizontal arm type, (d) Column type and (e) Gantry type 7.2.1 Cantilever type CMM The schematic diagram of the cantilever type CMM is shown in Figure 6.1. In the cantilever type CMM the measuring probe it attached to the Z-axis and moves in the vertical direction. The Z-axis carrier is fitted to the cantilever arm and provides movement in the Y-direction. The Z-axis movement is provided by the table. This type of CMM design provides easy access to the work area and has high workspace volume.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/34629401" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="1020b12914b2ab0d56e87b66e7fcf86e" rel="nofollow" data-download="{&quot;attachment_id&quot;:54492376,&quot;asset_id&quot;:34629401,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/54492376/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="68512499" href="https://independent.academia.edu/rahulsaraswat7">rahul saraswat</a><script data-card-contents-for-user="68512499" type="text/json">{"id":68512499,"first_name":"rahul","last_name":"saraswat","domain_name":"independent","page_name":"rahulsaraswat7","display_name":"rahul saraswat","profile_url":"https://independent.academia.edu/rahulsaraswat7?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_34629401 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="34629401"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 34629401, container: ".js-paper-rank-work_34629401", }); });</script></li><li class="js-percentile-work_34629401 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 34629401; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_34629401"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_34629401 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="34629401"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 34629401; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=34629401]").text(description); $(".js-view-count-work_34629401").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_34629401").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="34629401"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">2</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="60" rel="nofollow" href="https://www.academia.edu/Documents/in/Mechanical_Engineering">Mechanical Engineering</a>,&nbsp;<script data-card-contents-for-ri="60" type="text/json">{"id":60,"name":"Mechanical Engineering","url":"https://www.academia.edu/Documents/in/Mechanical_Engineering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=34629401]'), work: {"id":34629401,"title":"Chapter 7 Coordinate metrology","created_at":"2017-09-21T00:55:23.297-07:00","url":"https://www.academia.edu/34629401/Chapter_7_Coordinate_metrology?f_ri=1185","dom_id":"work_34629401","summary":"Coordinate metrology is a field of metrology that is becoming increasingly popular in the manufacturing industry. Coordinate metrology enables the three-dimensional measurement to be carried out on complex object in a single setup. The instrument used for this purpose is known as the coordinate measuring machine or CMM. In general, the CMM comprises three frames that move along three orthogonal axes, i.e. X-, Y-and Z-axis. Usually, a contact device known as measuring probe is attached to the end of the Z-axis. The displacement along each axis is measured by a linear measurement system and the readings are sent to an electronic controller. The electronic controller is connected to a computer that also enables various types of data processing to be performed. Repeated measurements on similar objects can be done easily by programming the motion of the axes of the machine. This reduces the time taken for measurement and inspection up to 80% to 90%. These machines are made in various sizes and the methods of operation are based on either manual or computer-aided. 7.2. Types of CMM The basic CMM consists of three axes, each provided with a guide way that enable precise movement along a straight line. Each guide way has a carrier that moves along. The carrier enables the second carrier to move along a straight line based on the first guide way. Each axis is fitted with a precision scale that records the position of the carrier measured from a reference point. The measuring probe is fitted to the carrier on the third axis. When the measuring probe touches the object being measured, the measurement system records the position of all three axes. There are several physical configurations of CMMs. All the configurations have a method of moving the probe along three axes relative to the object. Although there are many designs of CMMs but they can be grouped into five basic types as follows: (a) Cantilever type, (b) Bridge type, (c) Horizontal arm type, (d) Column type and (e) Gantry type 7.2.1 Cantilever type CMM The schematic diagram of the cantilever type CMM is shown in Figure 6.1. In the cantilever type CMM the measuring probe it attached to the Z-axis and moves in the vertical direction. The Z-axis carrier is fitted to the cantilever arm and provides movement in the Y-direction. The Z-axis movement is provided by the table. This type of CMM design provides easy access to the work area and has high workspace volume.","downloadable_attachments":[{"id":54492376,"asset_id":34629401,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":68512499,"first_name":"rahul","last_name":"saraswat","domain_name":"independent","page_name":"rahulsaraswat7","display_name":"rahul saraswat","profile_url":"https://independent.academia.edu/rahulsaraswat7?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":60,"name":"Mechanical Engineering","url":"https://www.academia.edu/Documents/in/Mechanical_Engineering?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_31021289" data-work_id="31021289" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/31021289/Fast_Box_Filter_with_Subpixel_Accuracy">Fast Box Filter with Subpixel Accuracy</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Box filter is simple and well known technique for image resizing. This technical paper describes an algorithm for image downscaling (shrinking) using box filter with subpixel accuracy, based on color space rescaling, which avoids oating... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_31021289" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Box filter is simple and well known technique for image resizing. This technical paper describes an algorithm for image downscaling (shrinking) using box filter with subpixel accuracy, based on color space rescaling, which avoids oating point arithmetics. All operations in accompaniment C++ code are completely integer based, which signicantly improves image shrinking speed and quality.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/31021289" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="b457dec02a785da2ec73bed28f47fcfd" rel="nofollow" data-download="{&quot;attachment_id&quot;:51453575,&quot;asset_id&quot;:31021289,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/51453575/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="43713845" href="https://independent.academia.edu/Sini%C5%A1aPetri%C4%87">Siniša Petrić</a><script data-card-contents-for-user="43713845" type="text/json">{"id":43713845,"first_name":"Siniša","last_name":"Petrić","domain_name":"independent","page_name":"SinišaPetrić","display_name":"Siniša Petrić","profile_url":"https://independent.academia.edu/Sini%C5%A1aPetri%C4%87?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_31021289 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="31021289"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 31021289, container: ".js-paper-rank-work_31021289", }); });</script></li><li class="js-percentile-work_31021289 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 31021289; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_31021289"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_31021289 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="31021289"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 31021289; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=31021289]").text(description); $(".js-view-count-work_31021289").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_31021289").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="31021289"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">6</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="4389" rel="nofollow" href="https://www.academia.edu/Documents/in/Resampling_Methods">Resampling Methods</a>,&nbsp;<script data-card-contents-for-ri="4389" type="text/json">{"id":4389,"name":"Resampling Methods","url":"https://www.academia.edu/Documents/in/Resampling_Methods?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="35938" rel="nofollow" href="https://www.academia.edu/Documents/in/Digital_Image_Processing">Digital Image Processing</a>,&nbsp;<script data-card-contents-for-ri="35938" type="text/json">{"id":35938,"name":"Digital Image Processing","url":"https://www.academia.edu/Documents/in/Digital_Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="285000" rel="nofollow" href="https://www.academia.edu/Documents/in/Filters">Filters</a><script data-card-contents-for-ri="285000" type="text/json">{"id":285000,"name":"Filters","url":"https://www.academia.edu/Documents/in/Filters?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=31021289]'), work: {"id":31021289,"title":"Fast Box Filter with Subpixel Accuracy","created_at":"2017-01-21T03:21:07.714-08:00","url":"https://www.academia.edu/31021289/Fast_Box_Filter_with_Subpixel_Accuracy?f_ri=1185","dom_id":"work_31021289","summary":"Box filter is simple and well known technique for image resizing. This technical paper describes an algorithm for image downscaling (shrinking) using box filter with subpixel accuracy, based on color space rescaling, which avoids oating point arithmetics. All operations in accompaniment C++ code are completely integer based, which signicantly improves image shrinking speed and quality.","downloadable_attachments":[{"id":51453575,"asset_id":31021289,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":43713845,"first_name":"Siniša","last_name":"Petrić","domain_name":"independent","page_name":"SinišaPetrić","display_name":"Siniša Petrić","profile_url":"https://independent.academia.edu/Sini%C5%A1aPetri%C4%87?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":4389,"name":"Resampling Methods","url":"https://www.academia.edu/Documents/in/Resampling_Methods?f_ri=1185","nofollow":true},{"id":35938,"name":"Digital Image Processing","url":"https://www.academia.edu/Documents/in/Digital_Image_Processing?f_ri=1185","nofollow":true},{"id":285000,"name":"Filters","url":"https://www.academia.edu/Documents/in/Filters?f_ri=1185","nofollow":true},{"id":357244,"name":"Image resizing","url":"https://www.academia.edu/Documents/in/Image_resizing?f_ri=1185"},{"id":1874005,"name":"Linear Interpolation","url":"https://www.academia.edu/Documents/in/Linear_Interpolation?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_34972924" data-work_id="34972924" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/34972924/Digital_Restoration_of_Cracks_Based_on_Image_Processing">Digital Restoration of Cracks Based on Image Processing</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><a href="https://irjet.net/archives/V2/i9/IRJET-V2I9256.pdf" rel="nofollow">https://irjet.net/archives/V2/i9/IRJET-V2I9256.pdf</a></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/34972924" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="d21b2b60bd67aaca8da7302a764864d7" rel="nofollow" data-download="{&quot;attachment_id&quot;:54836362,&quot;asset_id&quot;:34972924,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/54836362/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31493941" href="https://irjet.academia.edu/IRJET">IRJET Journal</a><script data-card-contents-for-user="31493941" type="text/json">{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}</script></span></span></li><li class="js-paper-rank-work_34972924 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="34972924"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 34972924, container: ".js-paper-rank-work_34972924", }); });</script></li><li class="js-percentile-work_34972924 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 34972924; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_34972924"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_34972924 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="34972924"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 34972924; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=34972924]").text(description); $(".js-view-count-work_34972924").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_34972924").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="34972924"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=34972924]'), work: {"id":34972924,"title":"Digital Restoration of Cracks Based on Image Processing","created_at":"2017-10-27T01:38:47.463-07:00","url":"https://www.academia.edu/34972924/Digital_Restoration_of_Cracks_Based_on_Image_Processing?f_ri=1185","dom_id":"work_34972924","summary":"https://irjet.net/archives/V2/i9/IRJET-V2I9256.pdf","downloadable_attachments":[{"id":54836362,"asset_id":34972924,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_31623728 coauthored" data-work_id="31623728" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/31623728/Hand_pose_reconstruction_using_2_camera_stereo_vision_system">Hand pose reconstruction using 2-camera stereo vision system</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest">In general , we present a cheap , reliable and scaleble propsal for hand tracking problem that estimates the pose of human hand to a 3D hand model constructed from a depth map and generated by a stereo vision system.</div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/31623728" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="e89ddfcc79f5fab56f5b38601326d006" rel="nofollow" data-download="{&quot;attachment_id&quot;:51949195,&quot;asset_id&quot;:31623728,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/51949195/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="60700389" href="https://elshoroukacademy.academia.edu/MohamedIslamFares">Mohamed Islam Fares</a><script data-card-contents-for-user="60700389" type="text/json">{"id":60700389,"first_name":"Mohamed","last_name":"Islam Fares","domain_name":"elshoroukacademy","page_name":"MohamedIslamFares","display_name":"Mohamed Islam Fares","profile_url":"https://elshoroukacademy.academia.edu/MohamedIslamFares?f_ri=1185","photo":"https://0.academia-photos.com/60700389/15799651/16324048/s65_mohamed.islam_fares.jpg"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-31623728">+1</span><div class="hidden js-additional-users-31623728"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://elshoroukacademy.academia.edu/HaderAlim">Hader Alim</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-31623728'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-31623728').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_31623728 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="31623728"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 31623728, container: ".js-paper-rank-work_31623728", }); });</script></li><li class="js-percentile-work_31623728 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 31623728; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_31623728"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_31623728 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="31623728"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 31623728; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=31623728]").text(description); $(".js-view-count-work_31623728").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_31623728").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="31623728"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">7</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="445" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Graphics">Computer Graphics</a>,&nbsp;<script data-card-contents-for-ri="445" type="text/json">{"id":445,"name":"Computer Graphics","url":"https://www.academia.edu/Documents/in/Computer_Graphics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=31623728]'), work: {"id":31623728,"title":"Hand pose reconstruction using 2-camera stereo vision system","created_at":"2017-02-27T00:24:55.320-08:00","url":"https://www.academia.edu/31623728/Hand_pose_reconstruction_using_2_camera_stereo_vision_system?f_ri=1185","dom_id":"work_31623728","summary":"In general , we present a cheap , reliable and scaleble propsal for hand tracking problem that estimates the pose of human hand to a 3D hand model constructed from a depth map and generated by a stereo vision system.","downloadable_attachments":[{"id":51949195,"asset_id":31623728,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":60700389,"first_name":"Mohamed","last_name":"Islam Fares","domain_name":"elshoroukacademy","page_name":"MohamedIslamFares","display_name":"Mohamed Islam Fares","profile_url":"https://elshoroukacademy.academia.edu/MohamedIslamFares?f_ri=1185","photo":"https://0.academia-photos.com/60700389/15799651/16324048/s65_mohamed.islam_fares.jpg"},{"id":20136922,"first_name":"Hader","last_name":"Alim","domain_name":"elshoroukacademy","page_name":"HaderAlim","display_name":"Hader Alim","profile_url":"https://elshoroukacademy.academia.edu/HaderAlim?f_ri=1185","photo":"https://0.academia-photos.com/20136922/14812281/16325775/s65_hader.alim.jpeg"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true},{"id":445,"name":"Computer Graphics","url":"https://www.academia.edu/Documents/in/Computer_Graphics?f_ri=1185","nofollow":true},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2151,"name":"Virtual Reality (Computer Graphics)","url":"https://www.academia.edu/Documents/in/Virtual_Reality_Computer_Graphics_?f_ri=1185"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality?f_ri=1185"},{"id":19706,"name":"Stereo Vision (Computer Vision)","url":"https://www.academia.edu/Documents/in/Stereo_Vision_Computer_Vision_?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_49935805" data-work_id="49935805" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/49935805/An_Efficient_Base_4_Leading_Zero_Detector_Design">An Efficient Base-4 Leading Zero Detector Design</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">A base-4 leading zero detector (LZD) design is proposed in this paper. The design is similar to the approach originally proposed by V.G. Oklobdzija with a different technique. The circuit modules used in... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_49935805" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">A&nbsp; base-4&nbsp; leading&nbsp; zero&nbsp; detector&nbsp; (LZD)&nbsp; design&nbsp; is&nbsp; proposed&nbsp; in&nbsp; this&nbsp; paper.&nbsp; The&nbsp; design&nbsp; is&nbsp; similar&nbsp; to&nbsp; the <br />approach&nbsp; originally&nbsp; proposed&nbsp; by&nbsp; V.G.&nbsp; Oklobdzija&nbsp; with&nbsp; a&nbsp; different&nbsp; technique.&nbsp; The&nbsp; circuit&nbsp; modules&nbsp; used&nbsp; in <br />the&nbsp; base-4&nbsp; LZD&nbsp; approach&nbsp; are&nbsp; designed&nbsp; and&nbsp; several&nbsp; N-bit&nbsp; LZD&nbsp; circuits&nbsp; are&nbsp; implemented&nbsp; with&nbsp; a&nbsp; standard-<br />cell&nbsp; realization&nbsp; in&nbsp; the&nbsp; Taiwan&nbsp; Semiconductor&nbsp; Manufacturing&nbsp; Company&nbsp; (TSMC)&nbsp; 0.65um&nbsp; CMOS&nbsp; process. <br />The&nbsp; performance&nbsp; and&nbsp; layout&nbsp; area&nbsp; of&nbsp; the&nbsp; base-4&nbsp; LZD&nbsp; realization&nbsp; is&nbsp; compared&nbsp; for&nbsp; implementations&nbsp; that <br />contain only 4-to-1 and 2-to-1 multiplexers</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/49935805" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="0845c57f1628ddfd429f6d2f3ddd1002" rel="nofollow" data-download="{&quot;attachment_id&quot;:68108262,&quot;asset_id&quot;:49935805,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/68108262/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="51025406" rel="nofollow" href="https://independent.academia.edu/ElectricalEngineeringAnInternationalJournalEEIJ">Electrical Engineering: An International Journal (EEIJ)</a><script data-card-contents-for-user="51025406" type="text/json">{"id":51025406,"first_name":"Electrical Engineering: An International Journal","last_name":"(EEIJ)","domain_name":"independent","page_name":"ElectricalEngineeringAnInternationalJournalEEIJ","display_name":"Electrical Engineering: An International Journal (EEIJ)","profile_url":"https://independent.academia.edu/ElectricalEngineeringAnInternationalJournalEEIJ?f_ri=1185","photo":"https://0.academia-photos.com/51025406/14101882/20546344/s65_electrical_engineering_an_international_journal._eeij_.jpg"}</script></span></span></li><li class="js-paper-rank-work_49935805 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="49935805"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 49935805, container: ".js-paper-rank-work_49935805", }); });</script></li><li class="js-percentile-work_49935805 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 49935805; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_49935805"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_49935805 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="49935805"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 49935805; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=49935805]").text(description); $(".js-view-count-work_49935805").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_49935805").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="49935805"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="49" rel="nofollow" href="https://www.academia.edu/Documents/in/Electrical_Engineering">Electrical Engineering</a>,&nbsp;<script data-card-contents-for-ri="49" type="text/json">{"id":49,"name":"Electrical Engineering","url":"https://www.academia.edu/Documents/in/Electrical_Engineering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="4148" rel="nofollow" href="https://www.academia.edu/Documents/in/Audio_Signal_Processing">Audio Signal Processing</a>,&nbsp;<script data-card-contents-for-ri="4148" type="text/json">{"id":4148,"name":"Audio Signal Processing","url":"https://www.academia.edu/Documents/in/Audio_Signal_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="10019" rel="nofollow" href="https://www.academia.edu/Documents/in/Photonics">Photonics</a><script data-card-contents-for-ri="10019" type="text/json">{"id":10019,"name":"Photonics","url":"https://www.academia.edu/Documents/in/Photonics?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=49935805]'), work: {"id":49935805,"title":"An Efficient Base-4 Leading Zero Detector Design","created_at":"2021-07-15T03:30:56.705-07:00","url":"https://www.academia.edu/49935805/An_Efficient_Base_4_Leading_Zero_Detector_Design?f_ri=1185","dom_id":"work_49935805","summary":"A base-4 leading zero detector (LZD) design is proposed in this paper. The design is similar to the \napproach originally proposed by V.G. Oklobdzija with a different technique. The circuit modules used in \nthe base-4 LZD approach are designed and several N-bit LZD circuits are implemented with a standard-\ncell realization in the Taiwan Semiconductor Manufacturing Company (TSMC) 0.65um CMOS process. \nThe performance and layout area of the base-4 LZD realization is compared for implementations that \ncontain only 4-to-1 and 2-to-1 multiplexers","downloadable_attachments":[{"id":68108262,"asset_id":49935805,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":51025406,"first_name":"Electrical Engineering: An International Journal","last_name":"(EEIJ)","domain_name":"independent","page_name":"ElectricalEngineeringAnInternationalJournalEEIJ","display_name":"Electrical Engineering: An International Journal (EEIJ)","profile_url":"https://independent.academia.edu/ElectricalEngineeringAnInternationalJournalEEIJ?f_ri=1185","photo":"https://0.academia-photos.com/51025406/14101882/20546344/s65_electrical_engineering_an_international_journal._eeij_.jpg"}],"research_interests":[{"id":49,"name":"Electrical Engineering","url":"https://www.academia.edu/Documents/in/Electrical_Engineering?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":4148,"name":"Audio Signal Processing","url":"https://www.academia.edu/Documents/in/Audio_Signal_Processing?f_ri=1185","nofollow":true},{"id":10019,"name":"Photonics","url":"https://www.academia.edu/Documents/in/Photonics?f_ri=1185","nofollow":true},{"id":17733,"name":"Nanotechnology","url":"https://www.academia.edu/Documents/in/Nanotechnology?f_ri=1185"},{"id":34234,"name":"Electrical and Electronics","url":"https://www.academia.edu/Documents/in/Electrical_and_Electronics?f_ri=1185"},{"id":55405,"name":"Sensors","url":"https://www.academia.edu/Documents/in/Sensors?f_ri=1185"},{"id":182928,"name":"Electric Circuits","url":"https://www.academia.edu/Documents/in/Electric_Circuits?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_10702991" data-work_id="10702991" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/10702991/ANALOG_COMMUICATION_LAB_LABORATORY_MANUAL_DEPARTMENT_OF_ELECTRONICS_AND_COMMUNICATIONS_ENGG_MALLA_REDDY_COLLEGE_OF_ENGINEERING_AND_TECHNOLOGY">ANALOG COMMUICATION LAB LABORATORY MANUAL DEPARTMENT OF ELECTRONICS AND COMMUNICATIONS ENGG MALLA REDDY COLLEGE OF ENGINEERING AND TECHNOLOGY</a></div></div><div class="u-pb4x u-mt3x"></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/10702991" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="347fce2a9fccfb0845cc50c2cc3f4cc1" rel="nofollow" data-download="{&quot;attachment_id&quot;:36597626,&quot;asset_id&quot;:10702991,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/36597626/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="26114172" href="https://annauniv.academia.edu/ArunKumar31">Arun Kumar</a><script data-card-contents-for-user="26114172" type="text/json">{"id":26114172,"first_name":"Arun","last_name":"Kumar","domain_name":"annauniv","page_name":"ArunKumar31","display_name":"Arun Kumar","profile_url":"https://annauniv.academia.edu/ArunKumar31?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_10702991 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="10702991"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 10702991, container: ".js-paper-rank-work_10702991", }); });</script></li><li class="js-percentile-work_10702991 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 10702991; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_10702991"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_10702991 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="10702991"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 10702991; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=10702991]").text(description); $(".js-view-count-work_10702991").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_10702991").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="10702991"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=10702991]'), work: {"id":10702991,"title":"ANALOG COMMUICATION LAB LABORATORY MANUAL DEPARTMENT OF ELECTRONICS AND COMMUNICATIONS ENGG MALLA REDDY COLLEGE OF ENGINEERING AND TECHNOLOGY","created_at":"2015-02-11T05:26:47.592-08:00","url":"https://www.academia.edu/10702991/ANALOG_COMMUICATION_LAB_LABORATORY_MANUAL_DEPARTMENT_OF_ELECTRONICS_AND_COMMUNICATIONS_ENGG_MALLA_REDDY_COLLEGE_OF_ENGINEERING_AND_TECHNOLOGY?f_ri=1185","dom_id":"work_10702991","summary":null,"downloadable_attachments":[{"id":36597626,"asset_id":10702991,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":26114172,"first_name":"Arun","last_name":"Kumar","domain_name":"annauniv","page_name":"ArunKumar31","display_name":"Arun Kumar","profile_url":"https://annauniv.academia.edu/ArunKumar31?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_36204891 coauthored" data-work_id="36204891" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/36204891/DIGITAL_TELESCOPE_STAR_FINDER_USING_ARDUINO_AND_STELLARIUM">DIGITAL TELESCOPE STAR FINDER USING ARDUINO AND STELLARIUM</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">— This paper aims to develop a cost-effective device to make a star tracking system using an Arduino DUE [6] and Stellarium [1]. It also includes the processing of the image via MATLAB Ⓡ [2] that is obtained from the telescope in near... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_36204891" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">— This paper aims to develop a cost-effective device to make a star tracking system using an Arduino DUE [6] and Stellarium [1]. It also includes the processing of the image via MATLAB Ⓡ [2] that is obtained from the telescope in near future [3].</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/36204891" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="fddb4833ed75bdf7d1bc9b9f00738f06" rel="nofollow" data-download="{&quot;attachment_id&quot;:56106188,&quot;asset_id&quot;:36204891,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/56106188/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="56201118" href="https://cambridge.academia.edu/SaswataDasgupta">Saswata Dasgupta</a><script data-card-contents-for-user="56201118" type="text/json">{"id":56201118,"first_name":"Saswata","last_name":"Dasgupta","domain_name":"cambridge","page_name":"SaswataDasgupta","display_name":"Saswata Dasgupta","profile_url":"https://cambridge.academia.edu/SaswataDasgupta?f_ri=1185","photo":"https://0.academia-photos.com/56201118/19319704/19243089/s65_saswata.dasgupta.jpg"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-36204891">+2</span><div class="hidden js-additional-users-36204891"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://independent.academia.edu/NilavaChanda">Nilava Chanda</a></span></div><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://independent.academia.edu/GhoshIman">Iman Ghosh</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-36204891'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-36204891').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_36204891 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="36204891"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 36204891, container: ".js-paper-rank-work_36204891", }); });</script></li><li class="js-percentile-work_36204891 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 36204891; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_36204891"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_36204891 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="36204891"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 36204891; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=36204891]").text(description); $(".js-view-count-work_36204891").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_36204891").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="36204891"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">4</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="516" rel="nofollow" href="https://www.academia.edu/Documents/in/Optics">Optics</a>,&nbsp;<script data-card-contents-for-ri="516" type="text/json">{"id":516,"name":"Optics","url":"https://www.academia.edu/Documents/in/Optics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="37939" rel="nofollow" href="https://www.academia.edu/Documents/in/Arduino">Arduino</a>,&nbsp;<script data-card-contents-for-ri="37939" type="text/json">{"id":37939,"name":"Arduino","url":"https://www.academia.edu/Documents/in/Arduino?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="38383" rel="nofollow" href="https://www.academia.edu/Documents/in/Telescope_Making">Telescope Making</a><script data-card-contents-for-ri="38383" type="text/json">{"id":38383,"name":"Telescope Making","url":"https://www.academia.edu/Documents/in/Telescope_Making?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=36204891]'), work: {"id":36204891,"title":"DIGITAL TELESCOPE STAR FINDER USING ARDUINO AND STELLARIUM","created_at":"2018-03-19T21:39:03.513-07:00","url":"https://www.academia.edu/36204891/DIGITAL_TELESCOPE_STAR_FINDER_USING_ARDUINO_AND_STELLARIUM?f_ri=1185","dom_id":"work_36204891","summary":"— This paper aims to develop a cost-effective device to make a star tracking system using an Arduino DUE [6] and Stellarium [1]. It also includes the processing of the image via MATLAB Ⓡ [2] that is obtained from the telescope in near future [3].","downloadable_attachments":[{"id":56106188,"asset_id":36204891,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":56201118,"first_name":"Saswata","last_name":"Dasgupta","domain_name":"cambridge","page_name":"SaswataDasgupta","display_name":"Saswata Dasgupta","profile_url":"https://cambridge.academia.edu/SaswataDasgupta?f_ri=1185","photo":"https://0.academia-photos.com/56201118/19319704/19243089/s65_saswata.dasgupta.jpg"},{"id":79473306,"first_name":"Nilava","last_name":"Chanda","domain_name":"independent","page_name":"NilavaChanda","display_name":"Nilava Chanda","profile_url":"https://independent.academia.edu/NilavaChanda?f_ri=1185","photo":"/images/s65_no_pic.png"},{"id":79488496,"first_name":"Iman","last_name":"Ghosh","domain_name":"independent","page_name":"GhoshIman","display_name":"Iman Ghosh","profile_url":"https://independent.academia.edu/GhoshIman?f_ri=1185","photo":"https://0.academia-photos.com/79488496/26144647/24758515/s65_iman.ghosh.jpg"}],"research_interests":[{"id":516,"name":"Optics","url":"https://www.academia.edu/Documents/in/Optics?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":37939,"name":"Arduino","url":"https://www.academia.edu/Documents/in/Arduino?f_ri=1185","nofollow":true},{"id":38383,"name":"Telescope Making","url":"https://www.academia.edu/Documents/in/Telescope_Making?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_33612786" data-work_id="33612786" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/33612786/Learning_from_Demonstration_Literature_Review">Learning from Demonstration - Literature Review</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Robotics is the future of automated manufacturing. Robotic systems are inherently more useful in undertaking tasks which might be difficult, or where it is necessary for accuracy to be quite high. However, use of robots in industry or... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_33612786" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Robotics is the future of automated manufacturing. Robotic systems are inherently more useful in undertaking tasks which might be difficult, or where it is necessary for accuracy to be quite high. However, use of robots in industry or other settings, involves complex programming, debugging and subsequent difficulties. Learning from demonstration is a process of automating the programming element of robotics. This report will detail existing literature to do with Learning from demonstration, and will identify the different parts needed to implement such a system, i.e. image processing, analysis, real-world equivalation, generic programming etc. The methodology in this paper will delve slightly into these topics, and provide a basis for implementing these into the Baxter robot by Rethink Robotics.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/33612786" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="cf7025dc5338fb2e61b61be2a05319dd" rel="nofollow" data-download="{&quot;attachment_id&quot;:53630598,&quot;asset_id&quot;:33612786,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/53630598/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="29302297" href="https://westernsydney.academia.edu/MichaelSeecy">Michael Seecy</a><script data-card-contents-for-user="29302297" type="text/json">{"id":29302297,"first_name":"Michael","last_name":"Seecy","domain_name":"westernsydney","page_name":"MichaelSeecy","display_name":"Michael Seecy","profile_url":"https://westernsydney.academia.edu/MichaelSeecy?f_ri=1185","photo":"https://0.academia-photos.com/29302297/12880585/16378695/s65_michael.seecy.jpg"}</script></span></span></li><li class="js-paper-rank-work_33612786 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="33612786"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 33612786, container: ".js-paper-rank-work_33612786", }); });</script></li><li class="js-percentile-work_33612786 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 33612786; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_33612786"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_33612786 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="33612786"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 33612786; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=33612786]").text(description); $(".js-view-count-work_33612786").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_33612786").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="33612786"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">6</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="9351" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Analysis">Image Analysis</a>,&nbsp;<script data-card-contents-for-ri="9351" type="text/json">{"id":9351,"name":"Image Analysis","url":"https://www.academia.edu/Documents/in/Image_Analysis?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="25395" rel="nofollow" href="https://www.academia.edu/Documents/in/Matlab">Matlab</a>,&nbsp;<script data-card-contents-for-ri="25395" type="text/json">{"id":25395,"name":"Matlab","url":"https://www.academia.edu/Documents/in/Matlab?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="148244" rel="nofollow" href="https://www.academia.edu/Documents/in/Programming_by_Demonstration">Programming by Demonstration</a><script data-card-contents-for-ri="148244" type="text/json">{"id":148244,"name":"Programming by Demonstration","url":"https://www.academia.edu/Documents/in/Programming_by_Demonstration?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=33612786]'), work: {"id":33612786,"title":"Learning from Demonstration - Literature Review","created_at":"2017-06-22T16:43:15.733-07:00","url":"https://www.academia.edu/33612786/Learning_from_Demonstration_Literature_Review?f_ri=1185","dom_id":"work_33612786","summary":"Robotics is the future of automated manufacturing. Robotic systems are inherently more useful in undertaking tasks which might be difficult, or where it is necessary for accuracy to be quite high. However, use of robots in industry or other settings, involves complex programming, debugging and subsequent difficulties. Learning from demonstration is a process of automating the programming element of robotics. This report will detail existing literature to do with Learning from demonstration, and will identify the different parts needed to implement such a system, i.e. image processing, analysis, real-world equivalation, generic programming etc. The methodology in this paper will delve slightly into these topics, and provide a basis for implementing these into the Baxter robot by Rethink Robotics.","downloadable_attachments":[{"id":53630598,"asset_id":33612786,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":29302297,"first_name":"Michael","last_name":"Seecy","domain_name":"westernsydney","page_name":"MichaelSeecy","display_name":"Michael Seecy","profile_url":"https://westernsydney.academia.edu/MichaelSeecy?f_ri=1185","photo":"https://0.academia-photos.com/29302297/12880585/16378695/s65_michael.seecy.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":9351,"name":"Image Analysis","url":"https://www.academia.edu/Documents/in/Image_Analysis?f_ri=1185","nofollow":true},{"id":25395,"name":"Matlab","url":"https://www.academia.edu/Documents/in/Matlab?f_ri=1185","nofollow":true},{"id":148244,"name":"Programming by Demonstration","url":"https://www.academia.edu/Documents/in/Programming_by_Demonstration?f_ri=1185","nofollow":true},{"id":226231,"name":"Learning from Demonstration","url":"https://www.academia.edu/Documents/in/Learning_from_Demonstration?f_ri=1185"},{"id":2735035,"name":"Baxter","url":"https://www.academia.edu/Documents/in/Baxter?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_31978972" data-work_id="31978972" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/31978972/Improvement_of_the_Recognition_Rate_by_Random_Forest">Improvement of the Recognition Rate by Random Forest</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this paper; we introduce a system of automatic recognition of characters based on the Random Forest Method in non-constrictive pictures that are stemmed from the terminals Mobile phone. After doing some pretreatments on the picture,... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_31978972" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In this paper; we introduce a system of automatic recognition of characters based on the Random Forest Method in non-constrictive pictures that are stemmed from the terminals Mobile phone. After doing some pretreatments on the picture, the text is segmented into lines and then into characters. In the stage of characteristics extraction, we are representing the input data into the vector of primitives of the zoning types, of diagonal, horizontal and of the Zernike moment. These characteristics are linked to pixels&#39; densities and they are extracted on binary pictures. In the classification stage, we examine four classification methods with two different classifiers types namely the multi-layer perceptron (MLP) and the Random Forest method. After some checking tests, the system of learning and recognition which is based on the Random Forest has shown a good performance on a basis of 100 models of pictures.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/31978972" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="418c21f95ea8ac1434e1fb69dc39b508" rel="nofollow" data-download="{&quot;attachment_id&quot;:52251550,&quot;asset_id&quot;:31978972,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/52251550/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="656912" href="https://ijera.academia.edu/ijera">IJERA Journal</a><script data-card-contents-for-user="656912" type="text/json">{"id":656912,"first_name":"IJERA","last_name":"Journal","domain_name":"ijera","page_name":"ijera","display_name":"IJERA Journal","profile_url":"https://ijera.academia.edu/ijera?f_ri=1185","photo":"https://0.academia-photos.com/656912/1164437/1458041/s65_ijera._www.ijera.com_.jpg"}</script></span></span></li><li class="js-paper-rank-work_31978972 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="31978972"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 31978972, container: ".js-paper-rank-work_31978972", }); });</script></li><li class="js-percentile-work_31978972 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 31978972; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_31978972"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_31978972 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="31978972"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 31978972; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=31978972]").text(description); $(".js-view-count-work_31978972").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_31978972").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="31978972"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">3</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a><script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=31978972]'), work: {"id":31978972,"title":"Improvement of the Recognition Rate by Random Forest","created_at":"2017-03-22T02:03:31.202-07:00","url":"https://www.academia.edu/31978972/Improvement_of_the_Recognition_Rate_by_Random_Forest?f_ri=1185","dom_id":"work_31978972","summary":"In this paper; we introduce a system of automatic recognition of characters based on the Random Forest Method in non-constrictive pictures that are stemmed from the terminals Mobile phone. After doing some pretreatments on the picture, the text is segmented into lines and then into characters. In the stage of characteristics extraction, we are representing the input data into the vector of primitives of the zoning types, of diagonal, horizontal and of the Zernike moment. These characteristics are linked to pixels' densities and they are extracted on binary pictures. In the classification stage, we examine four classification methods with two different classifiers types namely the multi-layer perceptron (MLP) and the Random Forest method. After some checking tests, the system of learning and recognition which is based on the Random Forest has shown a good performance on a basis of 100 models of pictures.","downloadable_attachments":[{"id":52251550,"asset_id":31978972,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":656912,"first_name":"IJERA","last_name":"Journal","domain_name":"ijera","page_name":"ijera","display_name":"IJERA Journal","profile_url":"https://ijera.academia.edu/ijera?f_ri=1185","photo":"https://0.academia-photos.com/656912/1164437/1458041/s65_ijera._www.ijera.com_.jpg"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_31763375" data-work_id="31763375" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/31763375/Improvement_of_the_Recognition_Rate_by_Random_Forest">Improvement of the Recognition Rate by Random Forest</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this paper; we introduce a system of automatic recognition of characters based on the Random Forest Method in non-constrictive pictures that are stemmed from the terminals Mobile phone. After doing some pretreatments on the picture,... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_31763375" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In this paper; we introduce a system of automatic recognition of characters based on the Random Forest Method in non-constrictive pictures that are stemmed from the terminals Mobile phone. After doing some pretreatments on the picture, the text is segmented into lines and then into characters. In the stage of characteristics extraction, we are representing the input data into the vector of primitives of the zoning types, of diagonal, horizontal and of the Zernike moment. These characteristics are linked to pixels&#39; densities and they are extracted on binary pictures. In the classification stage, we examine four classification methods with two different classifiers types namely the multi-layer perceptron (MLP) and the Random Forest method. After some checking tests, the system of learning and recognition which is based on the Random Forest has shown a good performance on a basis of 100 models of pictures.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/31763375" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="ac6a57d29fe039424b254996cd327764" rel="nofollow" data-download="{&quot;attachment_id&quot;:52068763,&quot;asset_id&quot;:31763375,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/52068763/download_file?st=MTczOTcxMjAzNSw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="656912" href="https://ijera.academia.edu/ijera">IJERA Journal</a><script data-card-contents-for-user="656912" type="text/json">{"id":656912,"first_name":"IJERA","last_name":"Journal","domain_name":"ijera","page_name":"ijera","display_name":"IJERA Journal","profile_url":"https://ijera.academia.edu/ijera?f_ri=1185","photo":"https://0.academia-photos.com/656912/1164437/1458041/s65_ijera._www.ijera.com_.jpg"}</script></span></span></li><li class="js-paper-rank-work_31763375 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="31763375"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 31763375, container: ".js-paper-rank-work_31763375", }); });</script></li><li class="js-percentile-work_31763375 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 31763375; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_31763375"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_31763375 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="31763375"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 31763375; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=31763375]").text(description); $(".js-view-count-work_31763375").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_31763375").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="31763375"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">3</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a><script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=31763375]'), work: {"id":31763375,"title":"Improvement of the Recognition Rate by Random Forest","created_at":"2017-03-07T23:03:14.024-08:00","url":"https://www.academia.edu/31763375/Improvement_of_the_Recognition_Rate_by_Random_Forest?f_ri=1185","dom_id":"work_31763375","summary":"In this paper; we introduce a system of automatic recognition of characters based on the Random Forest Method in non-constrictive pictures that are stemmed from the terminals Mobile phone. After doing some pretreatments on the picture, the text is segmented into lines and then into characters. In the stage of characteristics extraction, we are representing the input data into the vector of primitives of the zoning types, of diagonal, horizontal and of the Zernike moment. These characteristics are linked to pixels' densities and they are extracted on binary pictures. In the classification stage, we examine four classification methods with two different classifiers types namely the multi-layer perceptron (MLP) and the Random Forest method. After some checking tests, the system of learning and recognition which is based on the Random Forest has shown a good performance on a basis of 100 models of pictures.","downloadable_attachments":[{"id":52068763,"asset_id":31763375,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":656912,"first_name":"IJERA","last_name":"Journal","domain_name":"ijera","page_name":"ijera","display_name":"IJERA Journal","profile_url":"https://ijera.academia.edu/ijera?f_ri=1185","photo":"https://0.academia-photos.com/656912/1164437/1458041/s65_ijera._www.ijera.com_.jpg"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_34767748" data-work_id="34767748" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/34767748/A_Review_on_Fabric_Defect_Detection_Techniques">A Review on Fabric Defect Detection Techniques</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><a href="https://www.irjet.net/archives/V4/i9/IRJET-V4I926.pdf" rel="nofollow">https://www.irjet.net/archives/V4/i9/IRJET-V4I926.pdf</a></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/34767748" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="7200a70a677946f937cf496fa937c5a1" rel="nofollow" data-download="{&quot;attachment_id&quot;:54626954,&quot;asset_id&quot;:34767748,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/54626954/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31493941" href="https://irjet.academia.edu/IRJET">IRJET Journal</a><script data-card-contents-for-user="31493941" type="text/json">{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}</script></span></span></li><li class="js-paper-rank-work_34767748 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="34767748"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 34767748, container: ".js-paper-rank-work_34767748", }); });</script></li><li class="js-percentile-work_34767748 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 34767748; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_34767748"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_34767748 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="34767748"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 34767748; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=34767748]").text(description); $(".js-view-count-work_34767748").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_34767748").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="34767748"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=34767748]'), work: {"id":34767748,"title":"A Review on Fabric Defect Detection Techniques","created_at":"2017-10-04T21:46:24.924-07:00","url":"https://www.academia.edu/34767748/A_Review_on_Fabric_Defect_Detection_Techniques?f_ri=1185","dom_id":"work_34767748","summary":"https://www.irjet.net/archives/V4/i9/IRJET-V4I926.pdf","downloadable_attachments":[{"id":54626954,"asset_id":34767748,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_9605514 coauthored" data-work_id="9605514" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/9605514/Penser_limage_II_Anthropologies_du_visuel">Penser l&#39;image II. Anthropologies du visuel</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Ces dernières années ont été le théâtre d&#39;une étonnante résurgence de la question anthropologique. Parmi les propositions les plus débattues, il y a eu celle qui consisterait à penser l&#39;homme non pas comme un animal doué de langage, mais... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_9605514" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Ces dernières années ont été le théâtre d&#39;une étonnante résurgence de la question anthropologique. Parmi les propositions les plus débattues, il y a eu celle qui consisterait à penser l&#39;homme non pas comme un animal doué de langage, mais avant tout comme un homo pictor ou encore comme un homo spectator, capable de produire et de reconnaître ses propres images. Si entre-temps, cette idée d&#39;une anthropologie par l&#39;image a permis d&#39;inaugurer des nouveaux domaines de recherche, comme l&#39;anthropologie visuelle, celle-ci relève cependant d&#39;une histoire déjà plus ancienne dont cet ouvrage livre quelques clés. Entre ceux qui considèrent que les images sont le reflet exact de l&#39;homme et ceux qui, au contraire, sont d&#39;avis que les artefacts visuels mènent une vie dont les raisons échappent à la logique anthropocentrique, se dessinent aujourd&#39;hui les lignes de front de ce qui s&#39;apparente à une nouvelle querelle de l&#39;image.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/9605514" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="69aaae865f865623d648270a918be4da" rel="nofollow" data-download="{&quot;attachment_id&quot;:36907084,&quot;asset_id&quot;:9605514,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/36907084/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="3866038" href="https://unifr.academia.edu/EmmanuelAlloa">Emmanuel Alloa</a><script data-card-contents-for-user="3866038" type="text/json">{"id":3866038,"first_name":"Emmanuel","last_name":"Alloa","domain_name":"unifr","page_name":"EmmanuelAlloa","display_name":"Emmanuel Alloa","profile_url":"https://unifr.academia.edu/EmmanuelAlloa?f_ri=1185","photo":"https://0.academia-photos.com/3866038/2211657/60146227/s65_emmanuel.alloa.jpg"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-9605514">+1</span><div class="hidden js-additional-users-9605514"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://ehess.academia.edu/carloseveri">carlo severi</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-9605514'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-9605514').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_9605514 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="9605514"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 9605514, container: ".js-paper-rank-work_9605514", }); });</script></li><li class="js-percentile-work_9605514 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 9605514; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_9605514"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_9605514 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="9605514"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 9605514; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=9605514]").text(description); $(".js-view-count-work_9605514").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_9605514").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="9605514"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">14</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="767" rel="nofollow" href="https://www.academia.edu/Documents/in/Anthropology">Anthropology</a>,&nbsp;<script data-card-contents-for-ri="767" type="text/json">{"id":767,"name":"Anthropology","url":"https://www.academia.edu/Documents/in/Anthropology?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="808" rel="nofollow" href="https://www.academia.edu/Documents/in/Aesthetics">Aesthetics</a>,&nbsp;<script data-card-contents-for-ri="808" type="text/json">{"id":808,"name":"Aesthetics","url":"https://www.academia.edu/Documents/in/Aesthetics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="863" rel="nofollow" href="https://www.academia.edu/Documents/in/Visual_Studies">Visual Studies</a>,&nbsp;<script data-card-contents-for-ri="863" type="text/json">{"id":863,"name":"Visual Studies","url":"https://www.academia.edu/Documents/in/Visual_Studies?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="925" rel="nofollow" href="https://www.academia.edu/Documents/in/Visual_Anthropology">Visual Anthropology</a><script data-card-contents-for-ri="925" type="text/json">{"id":925,"name":"Visual Anthropology","url":"https://www.academia.edu/Documents/in/Visual_Anthropology?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=9605514]'), work: {"id":9605514,"title":"Penser l'image II. Anthropologies du visuel","created_at":"2014-12-03T07:41:04.899-08:00","url":"https://www.academia.edu/9605514/Penser_limage_II_Anthropologies_du_visuel?f_ri=1185","dom_id":"work_9605514","summary":"Ces dernières années ont été le théâtre d'une étonnante résurgence de la question anthropologique. Parmi les propositions les plus débattues, il y a eu celle qui consisterait à penser l'homme non pas comme un animal doué de langage, mais avant tout comme un homo pictor ou encore comme un homo spectator, capable de produire et de reconnaître ses propres images. Si entre-temps, cette idée d'une anthropologie par l'image a permis d'inaugurer des nouveaux domaines de recherche, comme l'anthropologie visuelle, celle-ci relève cependant d'une histoire déjà plus ancienne dont cet ouvrage livre quelques clés. Entre ceux qui considèrent que les images sont le reflet exact de l'homme et ceux qui, au contraire, sont d'avis que les artefacts visuels mènent une vie dont les raisons échappent à la logique anthropocentrique, se dessinent aujourd'hui les lignes de front de ce qui s'apparente à une nouvelle querelle de l'image. ","downloadable_attachments":[{"id":36907084,"asset_id":9605514,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":3866038,"first_name":"Emmanuel","last_name":"Alloa","domain_name":"unifr","page_name":"EmmanuelAlloa","display_name":"Emmanuel Alloa","profile_url":"https://unifr.academia.edu/EmmanuelAlloa?f_ri=1185","photo":"https://0.academia-photos.com/3866038/2211657/60146227/s65_emmanuel.alloa.jpg"},{"id":801856,"first_name":"carlo","last_name":"severi","domain_name":"ehess","page_name":"carloseveri","display_name":"carlo severi","profile_url":"https://ehess.academia.edu/carloseveri?f_ri=1185","photo":"https://0.academia-photos.com/801856/1091425/1361058/s65_carlo.severi.jpeg"}],"research_interests":[{"id":767,"name":"Anthropology","url":"https://www.academia.edu/Documents/in/Anthropology?f_ri=1185","nofollow":true},{"id":808,"name":"Aesthetics","url":"https://www.academia.edu/Documents/in/Aesthetics?f_ri=1185","nofollow":true},{"id":863,"name":"Visual Studies","url":"https://www.academia.edu/Documents/in/Visual_Studies?f_ri=1185","nofollow":true},{"id":925,"name":"Visual Anthropology","url":"https://www.academia.edu/Documents/in/Visual_Anthropology?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185"},{"id":1441,"name":"Philosophical Anthropology","url":"https://www.academia.edu/Documents/in/Philosophical_Anthropology?f_ri=1185"},{"id":3225,"name":"Visual Culture","url":"https://www.academia.edu/Documents/in/Visual_Culture?f_ri=1185"},{"id":16724,"name":"Vilem Flusser","url":"https://www.academia.edu/Documents/in/Vilem_Flusser?f_ri=1185"},{"id":20073,"name":"Bruno Latour","url":"https://www.academia.edu/Documents/in/Bruno_Latour?f_ri=1185"},{"id":36855,"name":"Hans Jonas","url":"https://www.academia.edu/Documents/in/Hans_Jonas?f_ri=1185"},{"id":51885,"name":"Bildwissenschaft","url":"https://www.academia.edu/Documents/in/Bildwissenschaft?f_ri=1185"},{"id":72127,"name":"Philippe Descola","url":"https://www.academia.edu/Documents/in/Philippe_Descola?f_ri=1185"},{"id":246249,"name":"James Elkins","url":"https://www.academia.edu/Documents/in/James_Elkins?f_ri=1185"},{"id":397147,"name":"Jan Assmann","url":"https://www.academia.edu/Documents/in/Jan_Assmann?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_36677778" data-work_id="36677778" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/36677778/Solution_Manual_for_Fundamentals_of_Digital_Signal_Processing_Using_MATLAB_2nd_Edition_by_Schilling">Solution Manual for Fundamentals of Digital Signal Processing Using MATLAB 2nd Edition by Schilling</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">1.1 Suppose the input to an amplifier is x a (t) = sin(2πF 0 t) and the steady-state output is y a (t) = 100 sin(2πF 0 t + φ 1) − 2 sin(4πF 0 t + φ 2) + cos(6πF 0 t + φ 3) (a) Is the amplifier a linear system or is it a nonlinear system?... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_36677778" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">1.1 Suppose the input to an amplifier is x a (t) = sin(2πF 0 t) and the steady-state output is y a (t) = 100 sin(2πF 0 t + φ 1) − 2 sin(4πF 0 t + φ 2) + cos(6πF 0 t + φ 3) (a) Is the amplifier a linear system or is it a nonlinear system? (b) What is the gain of the amplifier? (c) Find the average power of the output signal. (d) What is the total harmonic distortion of the amplifier? Solution (a) The amplifier is nonlinear because the steady-state output contains harmonics. (b) From (1.1.2), the amplifier gain is K = 100. (c) From (1.2.4), the output power is P y = d 2 0 4 + 1 2 d 2 1 + d + 2 2 + d 2 3 = .5(100 2 + 2 2 + 1) = 5002.5 (d) From (1.2.5) THD = 100(P y − d 2 1 /2) P y = 100(5002.5 − 5000) 5002.5 = .05%Solution Manual for Fundamentals of Digital Signal Processing Using MATLAB 2nd Edition by Schilling.&nbsp; &nbsp; &nbsp; Full file at <a href="https://testbanku.eu/" rel="nofollow">https://testbanku.eu/</a></div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/36677778" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="cc250f56e929ea8498ecf761c36fd0bb" rel="nofollow" data-download="{&quot;attachment_id&quot;:56614903,&quot;asset_id&quot;:36677778,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/56614903/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="81940677" href="https://independent.academia.edu/73WIEN7KI5HK">73WIEN 7KI5HK</a><script data-card-contents-for-user="81940677" type="text/json">{"id":81940677,"first_name":"73WIEN","last_name":"7KI5HK","domain_name":"independent","page_name":"73WIEN7KI5HK","display_name":"73WIEN 7KI5HK","profile_url":"https://independent.academia.edu/73WIEN7KI5HK?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_36677778 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="36677778"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 36677778, container: ".js-paper-rank-work_36677778", }); });</script></li><li class="js-percentile-work_36677778 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 36677778; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_36677778"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_36677778 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="36677778"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 36677778; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=36677778]").text(description); $(".js-view-count-work_36677778").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_36677778").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="36677778"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=36677778]'), work: {"id":36677778,"title":"Solution Manual for Fundamentals of Digital Signal Processing Using MATLAB 2nd Edition by Schilling","created_at":"2018-05-20T21:10:31.941-07:00","url":"https://www.academia.edu/36677778/Solution_Manual_for_Fundamentals_of_Digital_Signal_Processing_Using_MATLAB_2nd_Edition_by_Schilling?f_ri=1185","dom_id":"work_36677778","summary":"1.1 Suppose the input to an amplifier is x a (t) = sin(2πF 0 t) and the steady-state output is y a (t) = 100 sin(2πF 0 t + φ 1) − 2 sin(4πF 0 t + φ 2) + cos(6πF 0 t + φ 3) (a) Is the amplifier a linear system or is it a nonlinear system? (b) What is the gain of the amplifier? (c) Find the average power of the output signal. (d) What is the total harmonic distortion of the amplifier? Solution (a) The amplifier is nonlinear because the steady-state output contains harmonics. (b) From (1.1.2), the amplifier gain is K = 100. (c) From (1.2.4), the output power is P y = d 2 0 4 + 1 2 d 2 1 + d + 2 2 + d 2 3 = .5(100 2 + 2 2 + 1) = 5002.5 (d) From (1.2.5) THD = 100(P y − d 2 1 /2) P y = 100(5002.5 − 5000) 5002.5 = .05%Solution Manual for Fundamentals of Digital Signal Processing Using MATLAB 2nd Edition by Schilling. Full file at https://testbanku.eu/","downloadable_attachments":[{"id":56614903,"asset_id":36677778,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":81940677,"first_name":"73WIEN","last_name":"7KI5HK","domain_name":"independent","page_name":"73WIEN7KI5HK","display_name":"73WIEN 7KI5HK","profile_url":"https://independent.academia.edu/73WIEN7KI5HK?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_43769125" data-work_id="43769125" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/43769125/Bangla_Handwritten_Numeral_Character_Recognition_Using_Directional_Pattern">Bangla Handwritten Numeral Character Recognition Using Directional Pattern</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Handwritten character recognition has become a challenging and interesting field in the recent days due to its complex character shapes and huge pragmatic applications. A lot of research are already done and underway on English alphabets... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_43769125" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Handwritten character recognition has become a challenging and interesting field in the recent days due to its complex character shapes and huge pragmatic applications. A lot of research are already done and underway on English alphabets and numerals recognition. But in case of Bangla, even being the fifth largest spoken language in the world, has not undergone that much research. Besides, different complex shape of Bangla character makes the recognition more challenging. In this paper, we propose a directional pattern approach for feature extraction of Bangla numeric characters which attains a high accuracy of recognition. We use Local Directional Pattern (LDP) and Gradient Directional Pattern (GDP) for feature extraction and then two well-known machine learning algorithms, K-Nearest Neighbour (KNN) and Support Vector Machine (SVM), to classify the numeric character. We also ensemble the pattern oriented results to enhance the accuracy. Experimental results on the benchmark dataset CMATERdb 3.1.1 demonstrates an astounding recognition rate of accuracy 95.62% without preprocessing the data.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/43769125" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="8dd2e3f22faa8dcab90bbc2b3f7947ef" rel="nofollow" data-download="{&quot;attachment_id&quot;:64082065,&quot;asset_id&quot;:43769125,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/64082065/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="155736067" href="https://iutoic-dhaka.academia.edu/TalhaIbnAziz">Talha Ibn Aziz</a><script data-card-contents-for-user="155736067" type="text/json">{"id":155736067,"first_name":"Talha","last_name":"Ibn Aziz","domain_name":"iutoic-dhaka","page_name":"TalhaIbnAziz","display_name":"Talha Ibn Aziz","profile_url":"https://iutoic-dhaka.academia.edu/TalhaIbnAziz?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_43769125 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="43769125"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 43769125, container: ".js-paper-rank-work_43769125", }); });</script></li><li class="js-percentile-work_43769125 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 43769125; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_43769125"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_43769125 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="43769125"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 43769125; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=43769125]").text(description); $(".js-view-count-work_43769125").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_43769125").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="43769125"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">3</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="35938" rel="nofollow" href="https://www.academia.edu/Documents/in/Digital_Image_Processing">Digital Image Processing</a><script data-card-contents-for-ri="35938" type="text/json">{"id":35938,"name":"Digital Image Processing","url":"https://www.academia.edu/Documents/in/Digital_Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=43769125]'), work: {"id":43769125,"title":"Bangla Handwritten Numeral Character Recognition Using Directional Pattern","created_at":"2020-08-02T22:06:13.643-07:00","url":"https://www.academia.edu/43769125/Bangla_Handwritten_Numeral_Character_Recognition_Using_Directional_Pattern?f_ri=1185","dom_id":"work_43769125","summary":"Handwritten character recognition has become a challenging and interesting field in the recent days due to its complex character shapes and huge pragmatic applications. A lot of research are already done and underway on English alphabets and numerals recognition. But in case of Bangla, even being the fifth largest spoken language in the world, has not undergone that much research. Besides, different complex shape of Bangla character makes the recognition more challenging. In this paper, we propose a directional pattern approach for feature extraction of Bangla numeric characters which attains a high accuracy of recognition. We use Local Directional Pattern (LDP) and Gradient Directional Pattern (GDP) for feature extraction and then two well-known machine learning algorithms, K-Nearest Neighbour (KNN) and Support Vector Machine (SVM), to classify the numeric character. We also ensemble the pattern oriented results to enhance the accuracy. Experimental results on the benchmark dataset CMATERdb 3.1.1 demonstrates an astounding recognition rate of accuracy 95.62% without preprocessing the data.","downloadable_attachments":[{"id":64082065,"asset_id":43769125,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":155736067,"first_name":"Talha","last_name":"Ibn Aziz","domain_name":"iutoic-dhaka","page_name":"TalhaIbnAziz","display_name":"Talha Ibn Aziz","profile_url":"https://iutoic-dhaka.academia.edu/TalhaIbnAziz?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true},{"id":35938,"name":"Digital Image Processing","url":"https://www.academia.edu/Documents/in/Digital_Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_10776175" data-work_id="10776175" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/10776175/BEARD_AND_MUSTACHE_SEGMENTATION_USING_SPARSE_CLASSIFIERS_ON_SELF_QUOTIENT_IMAGES">BEARD AND MUSTACHE SEGMENTATION USING SPARSE CLASSIFIERS ON SELF-QUOTIENT IMAGES</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this paper, we propose a novel system for beard and mustache detection and segmentation in challenging facial images. Our system first eliminates illumination artifacts using the self-quotient algorithm. A sparse classifier is then... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_10776175" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In this paper, we propose a novel system for beard and mustache detection and segmentation in challenging facial images. Our system first eliminates illumination artifacts using the self-quotient algorithm. A sparse classifier is then used on these self-quotient images to classify a region as either containing skin or facial hair. We conduct experiments on the MBGC and color FERET databases to demonstrate the effectiveness of our proposed system.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/10776175" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="f83baabc1d3c68824d8cb40599e3f669" rel="nofollow" data-download="{&quot;attachment_id&quot;:36628881,&quot;asset_id&quot;:10776175,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/36628881/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="26232205" href="https://independent.academia.edu/%E6%98%B1%E9%BA%9F%E6%9D%A8">昱麟 杨</a><script data-card-contents-for-user="26232205" type="text/json">{"id":26232205,"first_name":"昱麟","last_name":"杨","domain_name":"independent","page_name":"昱麟杨","display_name":"昱麟 杨","profile_url":"https://independent.academia.edu/%E6%98%B1%E9%BA%9F%E6%9D%A8?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_10776175 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="10776175"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 10776175, container: ".js-paper-rank-work_10776175", }); });</script></li><li class="js-percentile-work_10776175 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 10776175; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_10776175"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_10776175 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="10776175"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 10776175; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=10776175]").text(description); $(".js-view-count-work_10776175").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_10776175").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="10776175"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">2</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="93217" rel="nofollow" href="https://www.academia.edu/Documents/in/Segmentation">Segmentation</a><script data-card-contents-for-ri="93217" type="text/json">{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=10776175]'), work: {"id":10776175,"title":"BEARD AND MUSTACHE SEGMENTATION USING SPARSE CLASSIFIERS ON SELF-QUOTIENT IMAGES","created_at":"2015-02-13T13:47:04.840-08:00","url":"https://www.academia.edu/10776175/BEARD_AND_MUSTACHE_SEGMENTATION_USING_SPARSE_CLASSIFIERS_ON_SELF_QUOTIENT_IMAGES?f_ri=1185","dom_id":"work_10776175","summary":"In this paper, we propose a novel system for beard and mustache detection and segmentation in challenging facial images. Our system first eliminates illumination artifacts using the self-quotient algorithm. A sparse classifier is then used on these self-quotient images to classify a region as either containing skin or facial hair. We conduct experiments on the MBGC and color FERET databases to demonstrate the effectiveness of our proposed system.","downloadable_attachments":[{"id":36628881,"asset_id":10776175,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":26232205,"first_name":"昱麟","last_name":"杨","domain_name":"independent","page_name":"昱麟杨","display_name":"昱麟 杨","profile_url":"https://independent.academia.edu/%E6%98%B1%E9%BA%9F%E6%9D%A8?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_81084001" data-work_id="81084001" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/81084001/Face_Recognition_based_Smart_Attendance_System_Using_IoT">Face Recognition based Smart Attendance System Using IoT</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Face recognition based smart attendance system using IoT is a tool for recognizing the students face while taking attendance by using face biometrics based on monitor camera image capturing. In our face recognition based smart attendance... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_81084001" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Face recognition based smart attendance system using IoT is a tool for recognizing the students face while taking attendance by using face biometrics based on monitor camera image capturing. In our face recognition based smart attendance project, a raspberry pi system will be able to find and recognize human faces fast and precisely in images. The long-established method of calling name of each student is tedious and there is always a chance of proxy attendance. The proposed system is based on face recognition to maintain the attendance record of students. As the process of attendance taking starts the system takes pictures of the attendees and then applies face detection and recognition technique to the given image and the recognized students are marked as present and their attendance is updated with corresponding time, student name and register number. We have used deep learning techniques to develop this project.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/81084001" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="66a6729cef2fcad85da248c866933aa8" rel="nofollow" data-download="{&quot;attachment_id&quot;:87251136,&quot;asset_id&quot;:81084001,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/87251136/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31493941" href="https://irjet.academia.edu/IRJET">IRJET Journal</a><script data-card-contents-for-user="31493941" type="text/json">{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}</script></span></span></li><li class="js-paper-rank-work_81084001 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="81084001"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 81084001, container: ".js-paper-rank-work_81084001", }); });</script></li><li class="js-percentile-work_81084001 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 81084001; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_81084001"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_81084001 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="81084001"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 81084001; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=81084001]").text(description); $(".js-view-count-work_81084001").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_81084001").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="81084001"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=81084001]'), work: {"id":81084001,"title":"Face Recognition based Smart Attendance System Using IoT","created_at":"2022-06-09T05:00:48.897-07:00","url":"https://www.academia.edu/81084001/Face_Recognition_based_Smart_Attendance_System_Using_IoT?f_ri=1185","dom_id":"work_81084001","summary":"Face recognition based smart attendance system using IoT is a tool for recognizing the students face while taking attendance by using face biometrics based on monitor camera image capturing. In our face recognition based smart attendance project, a raspberry pi system will be able to find and recognize human faces fast and precisely in images. The long-established method of calling name of each student is tedious and there is always a chance of proxy attendance. The proposed system is based on face recognition to maintain the attendance record of students. As the process of attendance taking starts the system takes pictures of the attendees and then applies face detection and recognition technique to the given image and the recognized students are marked as present and their attendance is updated with corresponding time, student name and register number. We have used deep learning techniques to develop this project.","downloadable_attachments":[{"id":87251136,"asset_id":81084001,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_77470840" data-work_id="77470840" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/77470840/Comparison_of_Various_RCNN_techniques_for_Classification_of_Object_from_Image">Comparison of Various RCNN techniques for Classification of Object from Image</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Object recognition is a very well known problem domain in the field of computer vision and robot vision. In earlier years in neuro science field CNN has played a key role in solving many problems related to identification and recognition... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_77470840" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Object recognition is a very well known problem domain in the field of computer vision and robot vision. In earlier years in neuro science field CNN has played a key role in solving many problems related to identification and recognition of object. As visual system of our brain shares many features with CNN&amp;#39;s properties it is very easy to model and test the problem domain of classification and identification of object. Basically CNN is typically a feed forward architecture; on the other hand visual system is based upon recurrent CNN (RCNN) for incorporating recurrent connections to each convolutional layer. In middle layers each unit is modulated by the activities of its neighboring units. Here Various RCNN techniques (RCNN,FAST RCNN,FASTER RCNN)are implemented for identifying bikes using CALTECH-101 database and alter their performances are compared.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/77470840" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="a331324ad277bf8e55ef2b27255f0320" rel="nofollow" data-download="{&quot;attachment_id&quot;:84802044,&quot;asset_id&quot;:77470840,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/84802044/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="137656029" rel="nofollow" href="https://bitmesra.academia.edu/KishoreSenapati">Kishore Senapati</a><script data-card-contents-for-user="137656029" type="text/json">{"id":137656029,"first_name":"Kishore","last_name":"Senapati","domain_name":"bitmesra","page_name":"KishoreSenapati","display_name":"Kishore Senapati","profile_url":"https://bitmesra.academia.edu/KishoreSenapati?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_77470840 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="77470840"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 77470840, container: ".js-paper-rank-work_77470840", }); });</script></li><li class="js-percentile-work_77470840 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 77470840; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_77470840"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_77470840 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="77470840"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 77470840; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=77470840]").text(description); $(".js-view-count-work_77470840").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_77470840").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="77470840"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">2</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=77470840]'), work: {"id":77470840,"title":"Comparison of Various RCNN techniques for Classification of Object from Image","created_at":"2022-04-24T10:22:28.550-07:00","url":"https://www.academia.edu/77470840/Comparison_of_Various_RCNN_techniques_for_Classification_of_Object_from_Image?f_ri=1185","dom_id":"work_77470840","summary":"Object recognition is a very well known problem domain in the field of computer vision and robot vision. In earlier years in neuro science field CNN has played a key role in solving many problems related to identification and recognition of object. As visual system of our brain shares many features with CNN\u0026#39;s properties it is very easy to model and test the problem domain of classification and identification of object. Basically CNN is typically a feed forward architecture; on the other hand visual system is based upon recurrent CNN (RCNN) for incorporating recurrent connections to each convolutional layer. In middle layers each unit is modulated by the activities of its neighboring units. Here Various RCNN techniques (RCNN,FAST RCNN,FASTER RCNN)are implemented for identifying bikes using CALTECH-101 database and alter their performances are compared.","downloadable_attachments":[{"id":84802044,"asset_id":77470840,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":137656029,"first_name":"Kishore","last_name":"Senapati","domain_name":"bitmesra","page_name":"KishoreSenapati","display_name":"Kishore Senapati","profile_url":"https://bitmesra.academia.edu/KishoreSenapati?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_77236340" data-work_id="77236340" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/77236340/Breast_cancer_detection_using_image_enhancement_and_segmentation_algorithms">Breast cancer detection using image enhancement and segmentation algorithms</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Enhancement of mammography images considers as powerful methods in categorization of breast normal tissues and pathologies. The digital image software gives chance to improve the mammographs and increasing their illustration value. The... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_77236340" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Enhancement of mammography images considers as powerful methods in categorization of breast normal tissues and pathologies. The digital image software gives chance to improve the mammographs and increasing their illustration value. The image processing methods in this paper were using contrast improvement, noise lessening, texture scrutiny and portioning algorithm. The mammography images kept in high quality to conserve the quality. Those methods aim to augment and hone the image intensity and eliminate noise from the images. The assortment factor of augmentation depends on the backdrop tissues and type of the breast lesions; hence, some lesions gave better improvement than the rest due to their density. The computation speed examined used correspondence and matching ratio. The results were 96.3 ± 8.5 (p&gt;0.05). The results showed that the breast lesions could be improved by using the proposed image improvement and segmentation methods.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/77236340" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="105053434abe6c4a29ffda68fc121685" rel="nofollow" data-download="{&quot;attachment_id&quot;:84705715,&quot;asset_id&quot;:77236340,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/84705715/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="16447374" href="https://mu-sa.academia.edu/YousifMohamedYousifAbdallah">Prof. Dr. Yousif M . Y Abdallah</a><script data-card-contents-for-user="16447374" type="text/json">{"id":16447374,"first_name":"Prof. Dr. Yousif","last_name":"Abdallah","domain_name":"mu-sa","page_name":"YousifMohamedYousifAbdallah","display_name":"Prof. Dr. Yousif M . Y Abdallah","profile_url":"https://mu-sa.academia.edu/YousifMohamedYousifAbdallah?f_ri=1185","photo":"https://0.academia-photos.com/16447374/4481842/5192193/s65_yousif_mohamed_yousif.abdallah.jpg"}</script></span></span></li><li class="js-paper-rank-work_77236340 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="77236340"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 77236340, container: ".js-paper-rank-work_77236340", }); });</script></li><li class="js-percentile-work_77236340 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 77236340; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_77236340"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_77236340 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="77236340"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 77236340; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=77236340]").text(description); $(".js-view-count-work_77236340").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_77236340").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="77236340"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="4998" rel="nofollow" href="https://www.academia.edu/Documents/in/Medical_Image_Processing">Medical Image Processing</a>,&nbsp;<script data-card-contents-for-ri="4998" type="text/json">{"id":4998,"name":"Medical Image Processing","url":"https://www.academia.edu/Documents/in/Medical_Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="6802" rel="nofollow" href="https://www.academia.edu/Documents/in/Breast_Cancer">Breast Cancer</a>,&nbsp;<script data-card-contents-for-ri="6802" type="text/json">{"id":6802,"name":"Breast Cancer","url":"https://www.academia.edu/Documents/in/Breast_Cancer?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="15811" rel="nofollow" href="https://www.academia.edu/Documents/in/Biomedical_signal_and_image_processing">Biomedical signal and image processing</a><script data-card-contents-for-ri="15811" type="text/json">{"id":15811,"name":"Biomedical signal and image processing","url":"https://www.academia.edu/Documents/in/Biomedical_signal_and_image_processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=77236340]'), work: {"id":77236340,"title":"Breast cancer detection using image enhancement and segmentation algorithms","created_at":"2022-04-21T21:54:37.320-07:00","url":"https://www.academia.edu/77236340/Breast_cancer_detection_using_image_enhancement_and_segmentation_algorithms?f_ri=1185","dom_id":"work_77236340","summary":"Enhancement of mammography images considers as powerful methods in categorization of breast normal tissues and pathologies. The digital image software gives chance to improve the mammographs and increasing their illustration value. The image processing methods in this paper were using contrast improvement, noise lessening, texture scrutiny and portioning algorithm. The mammography images kept in high quality to conserve the quality. Those methods aim to augment and hone the image intensity and eliminate noise from the images. The assortment factor of augmentation depends on the backdrop tissues and type of the breast lesions; hence, some lesions gave better improvement than the rest due to their density. The computation speed examined used correspondence and matching ratio. The results were 96.3 ± 8.5 (p\u003e0.05). The results showed that the breast lesions could be improved by using the proposed image improvement and segmentation methods.","downloadable_attachments":[{"id":84705715,"asset_id":77236340,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":16447374,"first_name":"Prof. Dr. Yousif","last_name":"Abdallah","domain_name":"mu-sa","page_name":"YousifMohamedYousifAbdallah","display_name":"Prof. Dr. Yousif M . Y Abdallah","profile_url":"https://mu-sa.academia.edu/YousifMohamedYousifAbdallah?f_ri=1185","photo":"https://0.academia-photos.com/16447374/4481842/5192193/s65_yousif_mohamed_yousif.abdallah.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":4998,"name":"Medical Image Processing","url":"https://www.academia.edu/Documents/in/Medical_Image_Processing?f_ri=1185","nofollow":true},{"id":6802,"name":"Breast Cancer","url":"https://www.academia.edu/Documents/in/Breast_Cancer?f_ri=1185","nofollow":true},{"id":15811,"name":"Biomedical signal and image processing","url":"https://www.academia.edu/Documents/in/Biomedical_signal_and_image_processing?f_ri=1185","nofollow":true},{"id":25395,"name":"Matlab","url":"https://www.academia.edu/Documents/in/Matlab?f_ri=1185"},{"id":36237,"name":"Biomedical","url":"https://www.academia.edu/Documents/in/Biomedical?f_ri=1185"},{"id":158839,"name":"Breast Cancer Research","url":"https://www.academia.edu/Documents/in/Breast_Cancer_Research?f_ri=1185"},{"id":300330,"name":"Matlab \u0026 Simulink programming","url":"https://www.academia.edu/Documents/in/Matlab_and_Simulink_programming?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_70183553" data-work_id="70183553" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/70183553/Transparent_information_hiding_with_automatic_embedding_range_selection_for_ownership_verification">Transparent information hiding with automatic embedding range selection for ownership verification</a></div></div><div class="u-pb4x u-mt3x"></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/70183553" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="bae0c19c0a6fc2c442f168d62d53f9de" rel="nofollow" data-download="{&quot;attachment_id&quot;:80030944,&quot;asset_id&quot;:70183553,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/80030944/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="104715998" href="https://independent.academia.edu/FarookSattar2">Farook Sattar</a><script data-card-contents-for-user="104715998" type="text/json">{"id":104715998,"first_name":"Farook","last_name":"Sattar","domain_name":"independent","page_name":"FarookSattar2","display_name":"Farook Sattar","profile_url":"https://independent.academia.edu/FarookSattar2?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_70183553 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="70183553"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 70183553, container: ".js-paper-rank-work_70183553", }); });</script></li><li class="js-percentile-work_70183553 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 70183553; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_70183553"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_70183553 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="70183553"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 70183553; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=70183553]").text(description); $(".js-view-count-work_70183553").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_70183553").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="70183553"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="300" rel="nofollow" href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a>,&nbsp;<script data-card-contents-for-ri="300" type="text/json">{"id":300,"name":"Mathematics","url":"https://www.academia.edu/Documents/in/Mathematics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="11193" rel="nofollow" href="https://www.academia.edu/Documents/in/Watermarking">Watermarking</a><script data-card-contents-for-ri="11193" type="text/json">{"id":11193,"name":"Watermarking","url":"https://www.academia.edu/Documents/in/Watermarking?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=70183553]'), work: {"id":70183553,"title":"Transparent information hiding with automatic embedding range selection for ownership verification","created_at":"2022-01-31T15:17:58.757-08:00","url":"https://www.academia.edu/70183553/Transparent_information_hiding_with_automatic_embedding_range_selection_for_ownership_verification?f_ri=1185","dom_id":"work_70183553","summary":null,"downloadable_attachments":[{"id":80030944,"asset_id":70183553,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":104715998,"first_name":"Farook","last_name":"Sattar","domain_name":"independent","page_name":"FarookSattar2","display_name":"Farook Sattar","profile_url":"https://independent.academia.edu/FarookSattar2?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":300,"name":"Mathematics","url":"https://www.academia.edu/Documents/in/Mathematics?f_ri=1185","nofollow":true},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":11193,"name":"Watermarking","url":"https://www.academia.edu/Documents/in/Watermarking?f_ri=1185","nofollow":true},{"id":122074,"name":"Information Hiding","url":"https://www.academia.edu/Documents/in/Information_Hiding?f_ri=1185"},{"id":185596,"name":"Discrete Cosine Transform","url":"https://www.academia.edu/Documents/in/Discrete_Cosine_Transform?f_ri=1185"},{"id":1374348,"name":"Discrete Cosine Transforms","url":"https://www.academia.edu/Documents/in/Discrete_Cosine_Transforms?f_ri=1185"},{"id":2965910,"name":"Perceptual Quality","url":"https://www.academia.edu/Documents/in/Perceptual_Quality?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_69726475" data-work_id="69726475" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/69726475/High_resolution_X_ray_imaging_and_analysis_of_coatings_on_and_in_wood">High-resolution X-ray imaging and analysis of coatings on and in wood</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Wood coatings are widely used for aesthetic and protective reasons. Assessment of coating performance during service life is crucial in order to establish a knowledge database for product optimization. A vast amount of techniques is... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_69726475" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Wood coatings are widely used for aesthetic and protective reasons. Assessment of coating performance during service life is crucial in order to establish a knowledge database for product optimization. A vast amount of techniques is available for analysis of a coating&#39;s behavior of which micro-imaging is an important tool. In addition to standard microscopy techniques, high-resolution X-ray tomography is presented as a modality offering non-destructive visualization of a coating and the substrate applied on. Combined with analysis of the 3D volumetric data, surface roughness, structure and thickness of the coating layer, penetration depth and related mechanical anchoring can be studied in relation with the underlying substrate. To provide a clear illustration of the possibilities and limitations of this technique, both an opaque solvent-borne and an opaque water-borne coating applied on two different wood types were scanned and analyzed. Clearly, three-dimensional X-ray imaging at high resolution produces valuable information merely by visualization. Moreover by proper analysis quantitative data is obtained taking into account the limitations of Xray computed tomography and of automated image processing.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/69726475" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="bdbb707626d2cb86a5d1564c2b2a5801" rel="nofollow" data-download="{&quot;attachment_id&quot;:79711754,&quot;asset_id&quot;:69726475,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/79711754/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="32723218" href="https://ugent.academia.edu/JorisVanAcker">Joris C R Van Acker</a><script data-card-contents-for-user="32723218" type="text/json">{"id":32723218,"first_name":"Joris","last_name":"Van Acker","domain_name":"ugent","page_name":"JorisVanAcker","display_name":"Joris C R Van Acker","profile_url":"https://ugent.academia.edu/JorisVanAcker?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_69726475 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="69726475"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 69726475, container: ".js-paper-rank-work_69726475", }); });</script></li><li class="js-percentile-work_69726475 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 69726475; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_69726475"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_69726475 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="69726475"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 69726475; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=69726475]").text(description); $(".js-view-count-work_69726475").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_69726475").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="69726475"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">16</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="56" rel="nofollow" href="https://www.academia.edu/Documents/in/Materials_Engineering">Materials Engineering</a>,&nbsp;<script data-card-contents-for-ri="56" type="text/json">{"id":56,"name":"Materials Engineering","url":"https://www.academia.edu/Documents/in/Materials_Engineering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="511" rel="nofollow" href="https://www.academia.edu/Documents/in/Materials_Science">Materials Science</a>,&nbsp;<script data-card-contents-for-ri="511" type="text/json">{"id":511,"name":"Materials Science","url":"https://www.academia.edu/Documents/in/Materials_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="10654" rel="nofollow" href="https://www.academia.edu/Documents/in/X-ray_imaging">X-ray imaging</a><script data-card-contents-for-ri="10654" type="text/json">{"id":10654,"name":"X-ray imaging","url":"https://www.academia.edu/Documents/in/X-ray_imaging?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=69726475]'), work: {"id":69726475,"title":"High-resolution X-ray imaging and analysis of coatings on and in wood","created_at":"2022-01-27T23:13:01.722-08:00","url":"https://www.academia.edu/69726475/High_resolution_X_ray_imaging_and_analysis_of_coatings_on_and_in_wood?f_ri=1185","dom_id":"work_69726475","summary":"Wood coatings are widely used for aesthetic and protective reasons. Assessment of coating performance during service life is crucial in order to establish a knowledge database for product optimization. A vast amount of techniques is available for analysis of a coating's behavior of which micro-imaging is an important tool. In addition to standard microscopy techniques, high-resolution X-ray tomography is presented as a modality offering non-destructive visualization of a coating and the substrate applied on. Combined with analysis of the 3D volumetric data, surface roughness, structure and thickness of the coating layer, penetration depth and related mechanical anchoring can be studied in relation with the underlying substrate. To provide a clear illustration of the possibilities and limitations of this technique, both an opaque solvent-borne and an opaque water-borne coating applied on two different wood types were scanned and analyzed. Clearly, three-dimensional X-ray imaging at high resolution produces valuable information merely by visualization. Moreover by proper analysis quantitative data is obtained taking into account the limitations of Xray computed tomography and of automated image processing.","downloadable_attachments":[{"id":79711754,"asset_id":69726475,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":32723218,"first_name":"Joris","last_name":"Van Acker","domain_name":"ugent","page_name":"JorisVanAcker","display_name":"Joris C R Van Acker","profile_url":"https://ugent.academia.edu/JorisVanAcker?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":56,"name":"Materials Engineering","url":"https://www.academia.edu/Documents/in/Materials_Engineering?f_ri=1185","nofollow":true},{"id":511,"name":"Materials Science","url":"https://www.academia.edu/Documents/in/Materials_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":10654,"name":"X-ray imaging","url":"https://www.academia.edu/Documents/in/X-ray_imaging?f_ri=1185","nofollow":true},{"id":33296,"name":"Surface Roughness","url":"https://www.academia.edu/Documents/in/Surface_Roughness?f_ri=1185"},{"id":71578,"name":"Wood","url":"https://www.academia.edu/Documents/in/Wood?f_ri=1185"},{"id":125057,"name":"Coatings","url":"https://www.academia.edu/Documents/in/Coatings?f_ri=1185"},{"id":307514,"name":"Surface and Coatings Technology","url":"https://www.academia.edu/Documents/in/Surface_and_Coatings_Technology?f_ri=1185"},{"id":309086,"name":"High Resolution","url":"https://www.academia.edu/Documents/in/High_Resolution?f_ri=1185"},{"id":413023,"name":"Roughness","url":"https://www.academia.edu/Documents/in/Roughness?f_ri=1185"},{"id":488106,"name":"Service Life","url":"https://www.academia.edu/Documents/in/Service_Life?f_ri=1185"},{"id":504035,"name":"Three Dimensional","url":"https://www.academia.edu/Documents/in/Three_Dimensional?f_ri=1185"},{"id":832539,"name":"Penetration Depth","url":"https://www.academia.edu/Documents/in/Penetration_Depth?f_ri=1185"},{"id":959921,"name":"X ray Computed Tomography","url":"https://www.academia.edu/Documents/in/X_ray_Computed_Tomography?f_ri=1185"},{"id":1030027,"name":"Softwood","url":"https://www.academia.edu/Documents/in/Softwood?f_ri=1185"},{"id":1188947,"name":"D","url":"https://www.academia.edu/Documents/in/D-351414216?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_68985406" data-work_id="68985406" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" rel="nofollow" href="https://www.academia.edu/68985406/A_new_colour_image_segmentation">A new colour image segmentation</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this paper an unsupervised colour image segmentation algorithm is presented. This method combines the advantages of the approaches based on split&amp;amp;merge and region growing, and the use of the RGB and HSV colour representation... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_68985406" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In this paper an unsupervised colour image segmentation algorithm is presented. This method combines the advantages of the approaches based on split&amp;amp;merge and region growing, and the use of the RGB and HSV colour representation models. The effectiveness of the proposed method has been verified by the implementation of the algorithm using three different testing images with homogeneous regions, spatially compact and continuous. It was observed that the proposed algorithm outperforms the other analysed techniques requiring shorter processing time when compared with the other analysed methods.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/68985406" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31194093" href="https://psiucv.academia.edu/gastonlefranc">Gaston Lefranc</a><script data-card-contents-for-user="31194093" type="text/json">{"id":31194093,"first_name":"Gaston","last_name":"Lefranc","domain_name":"psiucv","page_name":"gastonlefranc","display_name":"Gaston Lefranc","profile_url":"https://psiucv.academia.edu/gastonlefranc?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_68985406 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="68985406"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 68985406, container: ".js-paper-rank-work_68985406", }); });</script></li><li class="js-percentile-work_68985406 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 68985406; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_68985406"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_68985406 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="68985406"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 68985406; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=68985406]").text(description); $(".js-view-count-work_68985406").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_68985406").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="68985406"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">4</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="300" rel="nofollow" href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a>,&nbsp;<script data-card-contents-for-ri="300" type="text/json">{"id":300,"name":"Mathematics","url":"https://www.academia.edu/Documents/in/Mathematics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="26870" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_segmentation">Image segmentation</a><script data-card-contents-for-ri="26870" type="text/json">{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=68985406]'), work: {"id":68985406,"title":"A new colour image segmentation","created_at":"2022-01-21T04:51:39.952-08:00","url":"https://www.academia.edu/68985406/A_new_colour_image_segmentation?f_ri=1185","dom_id":"work_68985406","summary":"In this paper an unsupervised colour image segmentation algorithm is presented. This method combines the advantages of the approaches based on split\u0026amp;merge and region growing, and the use of the RGB and HSV colour representation models. The effectiveness of the proposed method has been verified by the implementation of the algorithm using three different testing images with homogeneous regions, spatially compact and continuous. It was observed that the proposed algorithm outperforms the other analysed techniques requiring shorter processing time when compared with the other analysed methods.","downloadable_attachments":[],"ordered_authors":[{"id":31194093,"first_name":"Gaston","last_name":"Lefranc","domain_name":"psiucv","page_name":"gastonlefranc","display_name":"Gaston Lefranc","profile_url":"https://psiucv.academia.edu/gastonlefranc?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":300,"name":"Mathematics","url":"https://www.academia.edu/Documents/in/Mathematics?f_ri=1185","nofollow":true},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_68507357" data-work_id="68507357" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/68507357/An_Interactive_and_Immersive_System_that_dynamically_adapts_2_D_projections_to_the_location_of_a_user_s_eyes">An Interactive and Immersive System that dynamically adapts 2 D projections to the location of a user ’ s eyes</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">This paper presents a non-intrusive system that gives the illusion of a 3D immersive and interactive environment with 2D projectors. The user does not need to wear glasses, nor to watch a (limited) screen. The virtual world is all around... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_68507357" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">This paper presents a non-intrusive system that gives the illusion of a 3D immersive and interactive environment with 2D projectors. The user does not need to wear glasses, nor to watch a (limited) screen. The virtual world is all around him, drawn on the floor. As the user is himself immersed in the virtual world, there is no need for a proxy like an avatar; he can move inside the virtual environment freely. Moreover, the I-see-3D system allows a user to manipulate virtual objects with his own body, making interactions with the virtual world very intuitive. Giving the illusion of 3D requires to render images in such a way that the deformation of the image projected on the floor is taken into account, as well as the position of the user’s “eye” in its virtual world. The resulting projection is neither perspective nor orthographic. Nevertheless, we describe how this can be implemented with the standard OpenGL pipeline, without any shader. Our experiments demonstrate that our system i...</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/68507357" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="94ac323a1ee71db46eea1c443fdd8f02" rel="nofollow" data-download="{&quot;attachment_id&quot;:78957209,&quot;asset_id&quot;:68507357,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/78957209/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="200453567" href="https://independent.academia.edu/S%C3%A9bastienPi%C3%A9rard">Sébastien Piérard</a><script data-card-contents-for-user="200453567" type="text/json">{"id":200453567,"first_name":"Sébastien","last_name":"Piérard","domain_name":"independent","page_name":"SébastienPiérard","display_name":"Sébastien Piérard","profile_url":"https://independent.academia.edu/S%C3%A9bastienPi%C3%A9rard?f_ri=1185","photo":"https://0.academia-photos.com/200453567/113769902/103043245/s65_s_bastien.pi_rard.jpg"}</script></span></span></li><li class="js-paper-rank-work_68507357 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="68507357"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 68507357, container: ".js-paper-rank-work_68507357", }); });</script></li><li class="js-percentile-work_68507357 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 68507357; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_68507357"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_68507357 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="68507357"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 68507357; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=68507357]").text(description); $(".js-view-count-work_68507357").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_68507357").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="68507357"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="51200" rel="nofollow" href="https://www.academia.edu/Documents/in/Immersion">Immersion</a>,&nbsp;<script data-card-contents-for-ri="51200" type="text/json">{"id":51200,"name":"Immersion","url":"https://www.academia.edu/Documents/in/Immersion?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="67380" rel="nofollow" href="https://www.academia.edu/Documents/in/Kalman_Filtering">Kalman Filtering</a>,&nbsp;<script data-card-contents-for-ri="67380" type="text/json">{"id":67380,"name":"Kalman Filtering","url":"https://www.academia.edu/Documents/in/Kalman_Filtering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="90025" rel="nofollow" href="https://www.academia.edu/Documents/in/Tracking">Tracking</a><script data-card-contents-for-ri="90025" type="text/json">{"id":90025,"name":"Tracking","url":"https://www.academia.edu/Documents/in/Tracking?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=68507357]'), work: {"id":68507357,"title":"An Interactive and Immersive System that dynamically adapts 2 D projections to the location of a user ’ s eyes","created_at":"2022-01-17T02:47:29.691-08:00","url":"https://www.academia.edu/68507357/An_Interactive_and_Immersive_System_that_dynamically_adapts_2_D_projections_to_the_location_of_a_user_s_eyes?f_ri=1185","dom_id":"work_68507357","summary":"This paper presents a non-intrusive system that gives the illusion of a 3D immersive and interactive environment with 2D projectors. The user does not need to wear glasses, nor to watch a (limited) screen. The virtual world is all around him, drawn on the floor. As the user is himself immersed in the virtual world, there is no need for a proxy like an avatar; he can move inside the virtual environment freely. Moreover, the I-see-3D system allows a user to manipulate virtual objects with his own body, making interactions with the virtual world very intuitive. Giving the illusion of 3D requires to render images in such a way that the deformation of the image projected on the floor is taken into account, as well as the position of the user’s “eye” in its virtual world. The resulting projection is neither perspective nor orthographic. Nevertheless, we describe how this can be implemented with the standard OpenGL pipeline, without any shader. Our experiments demonstrate that our system i...","downloadable_attachments":[{"id":78957209,"asset_id":68507357,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":200453567,"first_name":"Sébastien","last_name":"Piérard","domain_name":"independent","page_name":"SébastienPiérard","display_name":"Sébastien Piérard","profile_url":"https://independent.academia.edu/S%C3%A9bastienPi%C3%A9rard?f_ri=1185","photo":"https://0.academia-photos.com/200453567/113769902/103043245/s65_s_bastien.pi_rard.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":51200,"name":"Immersion","url":"https://www.academia.edu/Documents/in/Immersion?f_ri=1185","nofollow":true},{"id":67380,"name":"Kalman Filtering","url":"https://www.academia.edu/Documents/in/Kalman_Filtering?f_ri=1185","nofollow":true},{"id":90025,"name":"Tracking","url":"https://www.academia.edu/Documents/in/Tracking?f_ri=1185","nofollow":true},{"id":91387,"name":"Kinect","url":"https://www.academia.edu/Documents/in/Kinect?f_ri=1185"},{"id":96893,"name":"Calibration","url":"https://www.academia.edu/Documents/in/Calibration?f_ri=1185"},{"id":154521,"name":"Projection","url":"https://www.academia.edu/Documents/in/Projection?f_ri=1185"},{"id":1188947,"name":"D","url":"https://www.academia.edu/Documents/in/D-351414216?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_65774246" data-work_id="65774246" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/65774246/Automated_Maintenance_Approach_for_Industrial_Machineries_by_Soft_Computing_Techniques_at_Offline_Monitoring_Process">Automated Maintenance Approach for Industrial Machineries by Soft Computing Techniques at Offline Monitoring Process</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Fault diagnosis of industrial machineries become very much important for improving the quality of the manufacturing as well as for reducing the cost for product testing. In modern manufacturing scenario, a fast and reliable diagnosis... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_65774246" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Fault diagnosis of industrial machineries become very much important for improving the quality of the manufacturing as well as for reducing the cost for product testing. In modern manufacturing scenario, a fast and reliable diagnosis system has turned into a challenging issue in the complex industrial environment. In this work, the diagnosis of gearbox is considered as a mean of health monitoring system by used lubricant. The proposed methodology has been performed on the basis of wear particle analysis in gearbox at offline stage. Possible wear characterization has been done by image vision system to interpret into soft computing techniques like fuzzy inference and neural network mechanisms. Basically, the maintenance policy has been taken with the help of fuzzy expert system, which has been described in the present work.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/65774246" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="b239b879835df2fb10a551ddad155cf8" rel="nofollow" data-download="{&quot;attachment_id&quot;:77225154,&quot;asset_id&quot;:65774246,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/77225154/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="173580174" href="https://independent.academia.edu/SUROJITGHOSH24">SUROJIT GHOSH</a><script data-card-contents-for-user="173580174" type="text/json">{"id":173580174,"first_name":"SUROJIT","last_name":"GHOSH","domain_name":"independent","page_name":"SUROJITGHOSH24","display_name":"SUROJIT GHOSH","profile_url":"https://independent.academia.edu/SUROJITGHOSH24?f_ri=1185","photo":"https://0.academia-photos.com/173580174/49563149/37542527/s65_surojit.ghosh.png"}</script></span></span></li><li class="js-paper-rank-work_65774246 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="65774246"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 65774246, container: ".js-paper-rank-work_65774246", }); });</script></li><li class="js-percentile-work_65774246 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 65774246; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_65774246"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_65774246 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="65774246"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 65774246; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=65774246]").text(description); $(".js-view-count-work_65774246").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_65774246").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="65774246"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">14</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="48" rel="nofollow" href="https://www.academia.edu/Documents/in/Engineering">Engineering</a>,&nbsp;<script data-card-contents-for-ri="48" type="text/json">{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="60" rel="nofollow" href="https://www.academia.edu/Documents/in/Mechanical_Engineering">Mechanical Engineering</a>,&nbsp;<script data-card-contents-for-ri="60" type="text/json">{"id":60,"name":"Mechanical Engineering","url":"https://www.academia.edu/Documents/in/Mechanical_Engineering?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="6132" rel="nofollow" href="https://www.academia.edu/Documents/in/Soft_Computing">Soft Computing</a><script data-card-contents-for-ri="6132" type="text/json">{"id":6132,"name":"Soft Computing","url":"https://www.academia.edu/Documents/in/Soft_Computing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=65774246]'), work: {"id":65774246,"title":"Automated Maintenance Approach for Industrial Machineries by Soft Computing Techniques at Offline Monitoring Process","created_at":"2021-12-23T20:06:20.523-08:00","url":"https://www.academia.edu/65774246/Automated_Maintenance_Approach_for_Industrial_Machineries_by_Soft_Computing_Techniques_at_Offline_Monitoring_Process?f_ri=1185","dom_id":"work_65774246","summary":"Fault diagnosis of industrial machineries become very much important for improving the quality of the manufacturing as well as for reducing the cost for product testing. In modern manufacturing scenario, a fast and reliable diagnosis system has turned into a challenging issue in the complex industrial environment. In this work, the diagnosis of gearbox is considered as a mean of health monitoring system by used lubricant. The proposed methodology has been performed on the basis of wear particle analysis in gearbox at offline stage. Possible wear characterization has been done by image vision system to interpret into soft computing techniques like fuzzy inference and neural network mechanisms. Basically, the maintenance policy has been taken with the help of fuzzy expert system, which has been described in the present work.","downloadable_attachments":[{"id":77225154,"asset_id":65774246,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":173580174,"first_name":"SUROJIT","last_name":"GHOSH","domain_name":"independent","page_name":"SUROJITGHOSH24","display_name":"SUROJIT GHOSH","profile_url":"https://independent.academia.edu/SUROJITGHOSH24?f_ri=1185","photo":"https://0.academia-photos.com/173580174/49563149/37542527/s65_surojit.ghosh.png"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering?f_ri=1185","nofollow":true},{"id":60,"name":"Mechanical Engineering","url":"https://www.academia.edu/Documents/in/Mechanical_Engineering?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":6132,"name":"Soft Computing","url":"https://www.academia.edu/Documents/in/Soft_Computing?f_ri=1185","nofollow":true},{"id":10685,"name":"Fuzzy Expert System","url":"https://www.academia.edu/Documents/in/Fuzzy_Expert_System?f_ri=1185"},{"id":14305,"name":"Industrial Engineering","url":"https://www.academia.edu/Documents/in/Industrial_Engineering?f_ri=1185"},{"id":20307,"name":"PRODUCTION ENGINEERING","url":"https://www.academia.edu/Documents/in/PRODUCTION_ENGINEERING?f_ri=1185"},{"id":26066,"name":"Neural Network","url":"https://www.academia.edu/Documents/in/Neural_Network?f_ri=1185"},{"id":27010,"name":"Fuzzy Inference","url":"https://www.academia.edu/Documents/in/Fuzzy_Inference?f_ri=1185"},{"id":117101,"name":"Fault diagnosis","url":"https://www.academia.edu/Documents/in/Fault_diagnosis?f_ri=1185"},{"id":213658,"name":"Health monitoring","url":"https://www.academia.edu/Documents/in/Health_monitoring?f_ri=1185"},{"id":314271,"name":"Fuzzy System","url":"https://www.academia.edu/Documents/in/Fuzzy_System?f_ri=1185"},{"id":1211304,"name":"Artificial Neural Network","url":"https://www.academia.edu/Documents/in/Artificial_Neural_Network?f_ri=1185"},{"id":3480612,"name":"Vision system","url":"https://www.academia.edu/Documents/in/Vision_system?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_62142664" data-work_id="62142664" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/62142664/Optical_Character_Recognition_Based_Hand_Held_Device_for_Printed_Text_to_Braille_Conversion">Optical Character Recognition Based Hand-Held Device for Printed Text to Braille Conversion</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this paper we propose to develop a device that can be used by the visually challenged to read normal English books. Here we focus on letter-by-letter segmentation, recognition and transliteration to the Braille format. The device would... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_62142664" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In this paper we propose to develop a device that can be used by the visually challenged to read normal English books. Here we focus on letter-by-letter segmentation, recognition and transliteration to the Braille format. The device would use on board software to do the recognition and conversion. The recognized characters are transmitted to the interface which converts the characters to the Braille format which can be felt-read by the visually challenged. The device would be cheaper among its counterparts.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/62142664" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="b7c0c48660ec873cdd99e51ac1f682da" rel="nofollow" data-download="{&quot;attachment_id&quot;:74982142,&quot;asset_id&quot;:62142664,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/74982142/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="40894429" href="https://independent.academia.edu/amaladeepan">amala deepan</a><script data-card-contents-for-user="40894429" type="text/json">{"id":40894429,"first_name":"amala","last_name":"deepan","domain_name":"independent","page_name":"amaladeepan","display_name":"amala deepan","profile_url":"https://independent.academia.edu/amaladeepan?f_ri=1185","photo":"https://0.academia-photos.com/40894429/26678161/25179079/s65_amala.deepan.jpg"}</script></span></span></li><li class="js-paper-rank-work_62142664 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="62142664"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 62142664, container: ".js-paper-rank-work_62142664", }); });</script></li><li class="js-percentile-work_62142664 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 62142664; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_62142664"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_62142664 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="62142664"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 62142664; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=62142664]").text(description); $(".js-view-count-work_62142664").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_62142664").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="62142664"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">6</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="854" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a>,&nbsp;<script data-card-contents-for-ri="854" type="text/json">{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5473" rel="nofollow" href="https://www.academia.edu/Documents/in/Embedded_Systems">Embedded Systems</a><script data-card-contents-for-ri="5473" type="text/json">{"id":5473,"name":"Embedded Systems","url":"https://www.academia.edu/Documents/in/Embedded_Systems?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=62142664]'), work: {"id":62142664,"title":"Optical Character Recognition Based Hand-Held Device for Printed Text to Braille Conversion","created_at":"2021-11-21T17:14:51.100-08:00","url":"https://www.academia.edu/62142664/Optical_Character_Recognition_Based_Hand_Held_Device_for_Printed_Text_to_Braille_Conversion?f_ri=1185","dom_id":"work_62142664","summary":"In this paper we propose to develop a device that can be used by the visually challenged to read normal English books. Here we focus on letter-by-letter segmentation, recognition and transliteration to the Braille format. The device would use on board software to do the recognition and conversion. The recognized characters are transmitted to the interface which converts the characters to the Braille format which can be felt-read by the visually challenged. The device would be cheaper among its counterparts.","downloadable_attachments":[{"id":74982142,"asset_id":62142664,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":40894429,"first_name":"amala","last_name":"deepan","domain_name":"independent","page_name":"amaladeepan","display_name":"amala deepan","profile_url":"https://independent.academia.edu/amaladeepan?f_ri=1185","photo":"https://0.academia-photos.com/40894429/26678161/25179079/s65_amala.deepan.jpg"}],"research_interests":[{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true},{"id":5473,"name":"Embedded Systems","url":"https://www.academia.edu/Documents/in/Embedded_Systems?f_ri=1185","nofollow":true},{"id":102883,"name":"Real Time Systems","url":"https://www.academia.edu/Documents/in/Real_Time_Systems?f_ri=1185"},{"id":265584,"name":"Optical Character Recognition","url":"https://www.academia.edu/Documents/in/Optical_Character_Recognition?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_62048032" data-work_id="62048032" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/62048032/Adaptation_of_High_Resolution_Ikonos_Images_to_Googleearth_for_Zonguldak_Test_Field">Adaptation of High Resolution Ikonos Images to Googleearth for Zonguldak Test Field</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">At the present time, remote sensing technologies are being improved and day by day, by launching new satellites with new developed techniques, images can be obtained faster with high resolution. The satellites which had been launched... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_62048032" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">At the present time, remote sensing technologies are being improved and day by day, by launching new satellites with new developed techniques, images can be obtained faster with high resolution. The satellites which had been launched recent years for example; Ikonos, Quickbird and Orbview-3 have 1m and better resolution, continue their missions with quite success.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/62048032" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="356fe7b8c3bf54764cfc15eaec0a9c99" rel="nofollow" data-download="{&quot;attachment_id&quot;:74916058,&quot;asset_id&quot;:62048032,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/74916058/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="181874780" href="https://gtu-tr.academia.edu/UmutGunesSefercik">Umut Gunes Sefercik</a><script data-card-contents-for-user="181874780" type="text/json">{"id":181874780,"first_name":"Umut Gunes","last_name":"Sefercik","domain_name":"gtu-tr","page_name":"UmutGunesSefercik","display_name":"Umut Gunes Sefercik","profile_url":"https://gtu-tr.academia.edu/UmutGunesSefercik?f_ri=1185","photo":"https://0.academia-photos.com/181874780/50381024/38398197/s65_umut_gunes.sefercik.jpg"}</script></span></span></li><li class="js-paper-rank-work_62048032 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="62048032"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 62048032, container: ".js-paper-rank-work_62048032", }); });</script></li><li class="js-percentile-work_62048032 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 62048032; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_62048032"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_62048032 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="62048032"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 62048032; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=62048032]").text(description); $(".js-view-count-work_62048032").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_62048032").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="62048032"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">16</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1252" rel="nofollow" href="https://www.academia.edu/Documents/in/Remote_Sensing">Remote Sensing</a>,&nbsp;<script data-card-contents-for-ri="1252" type="text/json">{"id":1252,"name":"Remote Sensing","url":"https://www.academia.edu/Documents/in/Remote_Sensing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="23408" rel="nofollow" href="https://www.academia.edu/Documents/in/Remote_sensing_and_GIS">Remote sensing and GIS</a>,&nbsp;<script data-card-contents-for-ri="23408" type="text/json">{"id":23408,"name":"Remote sensing and GIS","url":"https://www.academia.edu/Documents/in/Remote_sensing_and_GIS?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="48769" rel="nofollow" href="https://www.academia.edu/Documents/in/Data_Integration">Data Integration</a><script data-card-contents-for-ri="48769" type="text/json">{"id":48769,"name":"Data Integration","url":"https://www.academia.edu/Documents/in/Data_Integration?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=62048032]'), work: {"id":62048032,"title":"Adaptation of High Resolution Ikonos Images to Googleearth for Zonguldak Test Field","created_at":"2021-11-19T23:19:24.995-08:00","url":"https://www.academia.edu/62048032/Adaptation_of_High_Resolution_Ikonos_Images_to_Googleearth_for_Zonguldak_Test_Field?f_ri=1185","dom_id":"work_62048032","summary":"At the present time, remote sensing technologies are being improved and day by day, by launching new satellites with new developed techniques, images can be obtained faster with high resolution. The satellites which had been launched recent years for example; Ikonos, Quickbird and Orbview-3 have 1m and better resolution, continue their missions with quite success.","downloadable_attachments":[{"id":74916058,"asset_id":62048032,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":181874780,"first_name":"Umut Gunes","last_name":"Sefercik","domain_name":"gtu-tr","page_name":"UmutGunesSefercik","display_name":"Umut Gunes Sefercik","profile_url":"https://gtu-tr.academia.edu/UmutGunesSefercik?f_ri=1185","photo":"https://0.academia-photos.com/181874780/50381024/38398197/s65_umut_gunes.sefercik.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1252,"name":"Remote Sensing","url":"https://www.academia.edu/Documents/in/Remote_Sensing?f_ri=1185","nofollow":true},{"id":23408,"name":"Remote sensing and GIS","url":"https://www.academia.edu/Documents/in/Remote_sensing_and_GIS?f_ri=1185","nofollow":true},{"id":48769,"name":"Data Integration","url":"https://www.academia.edu/Documents/in/Data_Integration?f_ri=1185","nofollow":true},{"id":59695,"name":"Navigation","url":"https://www.academia.edu/Documents/in/Navigation?f_ri=1185"},{"id":75777,"name":"Geomatics Engineering","url":"https://www.academia.edu/Documents/in/Geomatics_Engineering?f_ri=1185"},{"id":94504,"name":"Image Understanding","url":"https://www.academia.edu/Documents/in/Image_Understanding?f_ri=1185"},{"id":309086,"name":"High Resolution","url":"https://www.academia.edu/Documents/in/High_Resolution?f_ri=1185"},{"id":313898,"name":"Information Content","url":"https://www.academia.edu/Documents/in/Information_Content?f_ri=1185"},{"id":341805,"name":"Data Integrity","url":"https://www.academia.edu/Documents/in/Data_Integrity?f_ri=1185"},{"id":400982,"name":"Topographic Map","url":"https://www.academia.edu/Documents/in/Topographic_Map?f_ri=1185"},{"id":478709,"name":"Ikonos","url":"https://www.academia.edu/Documents/in/Ikonos?f_ri=1185"},{"id":1009312,"name":"Geographic Information Systems (GIS)","url":"https://www.academia.edu/Documents/in/Geographic_Information_Systems_GIS_?f_ri=1185"},{"id":1211138,"name":"Geographic Information System","url":"https://www.academia.edu/Documents/in/Geographic_Information_System?f_ri=1185"},{"id":2389604,"name":"GoogleEarth Pro","url":"https://www.academia.edu/Documents/in/GoogleEarth_Pro?f_ri=1185"},{"id":4041490,"name":"Satellite Image","url":"https://www.academia.edu/Documents/in/Satellite_Image?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_58176057" data-work_id="58176057" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/58176057/Fourier_Mellin_registration_of_line_delineated_tabular_document_images">Fourier–Mellin registration of line-delineated tabular document images</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Image registration (or alignment) is a useful preprocessing tool for assisting in manual data extraction from handwritten forms, as well as for preparing documents for batch OCR of specific page regions. A new technique is presented for... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_58176057" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Image registration (or alignment) is a useful preprocessing tool for assisting in manual data extraction from handwritten forms, as well as for preparing documents for batch OCR of specific page regions. A new technique is presented for fast registration of lined tabular document images in the presence of a global affine transformation, using the Discrete Fourier-Mellin Transform (DFMT). Each component of the affine transform is handled separately, which dramatically reduces the total parameter space of the problem. This method is robust and deals with all components of the affine transform in a uniform way by working in the frequency domain. The DFMT is extended to handle shear, which can approximate a small amount of perspective distortion. In order to limit registration to foreground pixels only, and to eliminate Fourier edge effects, a novel, locally adaptive foreground-background segmentation algorithm is introduced, based on the median filter, which eliminates the need for Blackman windowing as usually required by DFMT image registration. A novel information-theoretic optimization of the median filter is presented. An original method is demonstrated for automatically obtaining blank document templates from a set of registered document images.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/58176057" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="e3693f9a8415fa2e158f2ce82583a508" rel="nofollow" data-download="{&quot;attachment_id&quot;:72714289,&quot;asset_id&quot;:58176057,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/72714289/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31165315" href="https://independent.academia.edu/BarrettBill">Bill Barrett</a><script data-card-contents-for-user="31165315" type="text/json">{"id":31165315,"first_name":"Bill","last_name":"Barrett","domain_name":"independent","page_name":"BarrettBill","display_name":"Bill Barrett","profile_url":"https://independent.academia.edu/BarrettBill?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_58176057 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="58176057"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 58176057, container: ".js-paper-rank-work_58176057", }); });</script></li><li class="js-percentile-work_58176057 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 58176057; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_58176057"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_58176057 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="58176057"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 58176057; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=58176057]").text(description); $(".js-view-count-work_58176057").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_58176057").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="58176057"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">16</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1410" rel="nofollow" href="https://www.academia.edu/Documents/in/Information_Theory">Information Theory</a>,&nbsp;<script data-card-contents-for-ri="1410" type="text/json">{"id":1410,"name":"Information Theory","url":"https://www.academia.edu/Documents/in/Information_Theory?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="29731" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Registration">Image Registration</a>,&nbsp;<script data-card-contents-for-ri="29731" type="text/json">{"id":29731,"name":"Image Registration","url":"https://www.academia.edu/Documents/in/Image_Registration?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="32159" rel="nofollow" href="https://www.academia.edu/Documents/in/Family_history">Family history</a><script data-card-contents-for-ri="32159" type="text/json">{"id":32159,"name":"Family history","url":"https://www.academia.edu/Documents/in/Family_history?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=58176057]'), work: {"id":58176057,"title":"Fourier–Mellin registration of line-delineated tabular document images","created_at":"2021-10-15T10:33:19.361-07:00","url":"https://www.academia.edu/58176057/Fourier_Mellin_registration_of_line_delineated_tabular_document_images?f_ri=1185","dom_id":"work_58176057","summary":"Image registration (or alignment) is a useful preprocessing tool for assisting in manual data extraction from handwritten forms, as well as for preparing documents for batch OCR of specific page regions. A new technique is presented for fast registration of lined tabular document images in the presence of a global affine transformation, using the Discrete Fourier-Mellin Transform (DFMT). Each component of the affine transform is handled separately, which dramatically reduces the total parameter space of the problem. This method is robust and deals with all components of the affine transform in a uniform way by working in the frequency domain. The DFMT is extended to handle shear, which can approximate a small amount of perspective distortion. In order to limit registration to foreground pixels only, and to eliminate Fourier edge effects, a novel, locally adaptive foreground-background segmentation algorithm is introduced, based on the median filter, which eliminates the need for Blackman windowing as usually required by DFMT image registration. A novel information-theoretic optimization of the median filter is presented. An original method is demonstrated for automatically obtaining blank document templates from a set of registered document images.","downloadable_attachments":[{"id":72714289,"asset_id":58176057,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":31165315,"first_name":"Bill","last_name":"Barrett","domain_name":"independent","page_name":"BarrettBill","display_name":"Bill Barrett","profile_url":"https://independent.academia.edu/BarrettBill?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1410,"name":"Information Theory","url":"https://www.academia.edu/Documents/in/Information_Theory?f_ri=1185","nofollow":true},{"id":29731,"name":"Image Registration","url":"https://www.academia.edu/Documents/in/Image_Registration?f_ri=1185","nofollow":true},{"id":32159,"name":"Family history","url":"https://www.academia.edu/Documents/in/Family_history?f_ri=1185","nofollow":true},{"id":43981,"name":"Optimization","url":"https://www.academia.edu/Documents/in/Optimization?f_ri=1185"},{"id":71898,"name":"Perspective","url":"https://www.academia.edu/Documents/in/Perspective?f_ri=1185"},{"id":93217,"name":"Segmentation","url":"https://www.academia.edu/Documents/in/Segmentation?f_ri=1185"},{"id":96798,"name":"Noise reduction","url":"https://www.academia.edu/Documents/in/Noise_reduction?f_ri=1185"},{"id":155196,"name":"Median Filter","url":"https://www.academia.edu/Documents/in/Median_Filter?f_ri=1185"},{"id":775583,"name":"Shear","url":"https://www.academia.edu/Documents/in/Shear?f_ri=1185"},{"id":903929,"name":"Batch Process","url":"https://www.academia.edu/Documents/in/Batch_Process?f_ri=1185"},{"id":1288273,"name":"Fourier Transformation","url":"https://www.academia.edu/Documents/in/Fourier_Transformation?f_ri=1185"},{"id":1554856,"name":"Affine Transformation","url":"https://www.academia.edu/Documents/in/Affine_Transformation?f_ri=1185"},{"id":1625072,"name":"Frequency Domain","url":"https://www.academia.edu/Documents/in/Frequency_Domain?f_ri=1185"},{"id":2513107,"name":"parameter space","url":"https://www.academia.edu/Documents/in/parameter_space?f_ri=1185"},{"id":2700712,"name":"Mellin transform","url":"https://www.academia.edu/Documents/in/Mellin_transform?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_56761056" data-work_id="56761056" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/56761056/Blood_vessel_segmentation_from_color_retinal_images_using_unsupervised_texture_classification">Blood vessel segmentation from color retinal images using unsupervised texture classification</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Automated blood vessel segmentation is an important issue for assessing retinal abnormalities and diagnoses of many diseases. The segmentation of vessels is complicated by huge variations in local contrast, particularly in case of the... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_56761056" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Automated blood vessel segmentation is an important issue for assessing retinal abnormalities and diagnoses of many diseases. The segmentation of vessels is complicated by huge variations in local contrast, particularly in case of the minor vessels. In this paper, we propose a new method of texture based vessel segmentation to overcome this problem. We use Gaussian and L * a * b * perceptually uniform color spaces with original RGB for texture feature extraction on retinal images. A bank of Gabor energy filters are used to analyze the texture features from which a feature vector is constructed for each pixel. The Fuzzy C-Means (FCM) clustering algorithm is used to classify the feature vectors into vessel or non-vessel based on the texture properties. From the FCM clustering output we attain the final output segmented image after a post processing step. We compare our method with hand-labeled ground truth segmentation of five images and achieve 84.37% sensitivity and 99.61% specificity.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/56761056" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="4a8198f3478deafdfe4b138e4478ef48" rel="nofollow" data-download="{&quot;attachment_id&quot;:71986908,&quot;asset_id&quot;:56761056,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/71986908/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="2490902" href="https://monash.academia.edu/JoeyChua">Joey Chua</a><script data-card-contents-for-user="2490902" type="text/json">{"id":2490902,"first_name":"Joey","last_name":"Chua","domain_name":"monash","page_name":"JoeyChua","display_name":"Joey Chua","profile_url":"https://monash.academia.edu/JoeyChua?f_ri=1185","photo":"https://0.academia-photos.com/2490902/778458/967304/s65_joey.chua.jpg"}</script></span></span></li><li class="js-paper-rank-work_56761056 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="56761056"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 56761056, container: ".js-paper-rank-work_56761056", }); });</script></li><li class="js-percentile-work_56761056 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 56761056; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_56761056"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_56761056 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="56761056"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 56761056; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=56761056]").text(description); $(".js-view-count-work_56761056").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_56761056").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="56761056"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">18</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="5394" rel="nofollow" href="https://www.academia.edu/Documents/in/Fuzzy_set_theory">Fuzzy set theory</a>,&nbsp;<script data-card-contents-for-ri="5394" type="text/json">{"id":5394,"name":"Fuzzy set theory","url":"https://www.academia.edu/Documents/in/Fuzzy_set_theory?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="26870" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_segmentation">Image segmentation</a>,&nbsp;<script data-card-contents-for-ri="26870" type="text/json">{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="55549" rel="nofollow" href="https://www.academia.edu/Documents/in/Gaussian_processes">Gaussian processes</a><script data-card-contents-for-ri="55549" type="text/json">{"id":55549,"name":"Gaussian processes","url":"https://www.academia.edu/Documents/in/Gaussian_processes?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=56761056]'), work: {"id":56761056,"title":"Blood vessel segmentation from color retinal images using unsupervised texture classification","created_at":"2021-10-09T02:47:02.112-07:00","url":"https://www.academia.edu/56761056/Blood_vessel_segmentation_from_color_retinal_images_using_unsupervised_texture_classification?f_ri=1185","dom_id":"work_56761056","summary":"Automated blood vessel segmentation is an important issue for assessing retinal abnormalities and diagnoses of many diseases. The segmentation of vessels is complicated by huge variations in local contrast, particularly in case of the minor vessels. In this paper, we propose a new method of texture based vessel segmentation to overcome this problem. We use Gaussian and L * a * b * perceptually uniform color spaces with original RGB for texture feature extraction on retinal images. A bank of Gabor energy filters are used to analyze the texture features from which a feature vector is constructed for each pixel. The Fuzzy C-Means (FCM) clustering algorithm is used to classify the feature vectors into vessel or non-vessel based on the texture properties. From the FCM clustering output we attain the final output segmented image after a post processing step. We compare our method with hand-labeled ground truth segmentation of five images and achieve 84.37% sensitivity and 99.61% specificity.","downloadable_attachments":[{"id":71986908,"asset_id":56761056,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":2490902,"first_name":"Joey","last_name":"Chua","domain_name":"monash","page_name":"JoeyChua","display_name":"Joey Chua","profile_url":"https://monash.academia.edu/JoeyChua?f_ri=1185","photo":"https://0.academia-photos.com/2490902/778458/967304/s65_joey.chua.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":5394,"name":"Fuzzy set theory","url":"https://www.academia.edu/Documents/in/Fuzzy_set_theory?f_ri=1185","nofollow":true},{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true},{"id":55549,"name":"Gaussian processes","url":"https://www.academia.edu/Documents/in/Gaussian_processes?f_ri=1185","nofollow":true},{"id":56368,"name":"Image Classification","url":"https://www.academia.edu/Documents/in/Image_Classification?f_ri=1185"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction?f_ri=1185"},{"id":197605,"name":"Texture Classification","url":"https://www.academia.edu/Documents/in/Texture_Classification?f_ri=1185"},{"id":245634,"name":"Eye","url":"https://www.academia.edu/Documents/in/Eye?f_ri=1185"},{"id":253481,"name":"Vessel Segmentation","url":"https://www.academia.edu/Documents/in/Vessel_Segmentation?f_ri=1185"},{"id":327124,"name":"Color Space","url":"https://www.academia.edu/Documents/in/Color_Space?f_ri=1185"},{"id":329268,"name":"Medical Image","url":"https://www.academia.edu/Documents/in/Medical_Image?f_ri=1185"},{"id":574193,"name":"Filter Bank","url":"https://www.academia.edu/Documents/in/Filter_Bank?f_ri=1185"},{"id":822358,"name":"Ground Truth","url":"https://www.academia.edu/Documents/in/Ground_Truth?f_ri=1185"},{"id":878219,"name":"Texture Features","url":"https://www.academia.edu/Documents/in/Texture_Features?f_ri=1185"},{"id":1251210,"name":"Blood Vessel","url":"https://www.academia.edu/Documents/in/Blood_Vessel?f_ri=1185"},{"id":1612732,"name":"Retinal imaging","url":"https://www.academia.edu/Documents/in/Retinal_imaging?f_ri=1185"},{"id":3286327,"name":"Image Texture","url":"https://www.academia.edu/Documents/in/Image_Texture?f_ri=1185"},{"id":3641206,"name":"Fuzzy C mean","url":"https://www.academia.edu/Documents/in/Fuzzy_C_mean?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_43674906" data-work_id="43674906" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/43674906/Classifying_Chest_Pathology_Images_Using_Deep_Learning_Techniques">Classifying Chest Pathology Images Using Deep Learning Techniques</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">In this review, the application of in-depth learning for medical diagnosis will be corrected. A thorough analysis of various scientific articles in the domain of deep neural network applications in the medical field has been implemented.... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_43674906" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">In this review, the application of in-depth learning for medical diagnosis will be corrected. A thorough analysis of various scientific articles in the domain of deep neural network applications in the medical field has been implemented. Has received more than 300 research articles and after several steps of selection, 46 articles have been presented in more detail The research found that the neural network (CNN) is the most prevalent agent when talking about deep learning and medical image analysis. In addition, from the findings of this article, it can be observed that the application of widespread learning technology is widespread. But most of the applications that focus on bioinformatics, medical diagnostics and other similar fields. In this work, we examine the strength of the deep learning method for pathological examination in chest radiography. Convolutional neural networks (CNN) The method of deep architectural classification is popular due to the ability to learn to represent medium and high level images. We explore CNN&#39;s ability to identify different types of diseases in chest X-ray images. Moreover, because of the very large training sets that are not available in the medical domain, we therefore explore the possibility of using deep learning methods based on non-medical learning. We tested our algorithm on 93 datasets. We use CNN that is trained with ImageNet, which is a well-known non-animated large image database. The best performance is due to the use of features pulled from CNN and low-level features.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/43674906" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="eaf84ea6da854b131bc88f0f217f6088" rel="nofollow" data-download="{&quot;attachment_id&quot;:63976940,&quot;asset_id&quot;:43674906,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/63976940/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="118928009" href="https://unipune.academia.edu/vrushalidhanokar">Vrushali Dhanokar</a><script data-card-contents-for-user="118928009" type="text/json">{"id":118928009,"first_name":"Vrushali","last_name":"Dhanokar","domain_name":"unipune","page_name":"vrushalidhanokar","display_name":"Vrushali Dhanokar","profile_url":"https://unipune.academia.edu/vrushalidhanokar?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_43674906 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="43674906"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 43674906, container: ".js-paper-rank-work_43674906", }); });</script></li><li class="js-percentile-work_43674906 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 43674906; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_43674906"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_43674906 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="43674906"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 43674906; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=43674906]").text(description); $(".js-view-count-work_43674906").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_43674906").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="43674906"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">6</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="422" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a>,&nbsp;<script data-card-contents-for-ri="422" type="text/json">{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2009" rel="nofollow" href="https://www.academia.edu/Documents/in/Data_Mining">Data Mining</a><script data-card-contents-for-ri="2009" type="text/json">{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=43674906]'), work: {"id":43674906,"title":"Classifying Chest Pathology Images Using Deep Learning Techniques","created_at":"2020-07-20T20:35:15.982-07:00","url":"https://www.academia.edu/43674906/Classifying_Chest_Pathology_Images_Using_Deep_Learning_Techniques?f_ri=1185","dom_id":"work_43674906","summary":"In this review, the application of in-depth learning for medical diagnosis will be corrected. A thorough analysis of various scientific articles in the domain of deep neural network applications in the medical field has been implemented. Has received more than 300 research articles and after several steps of selection, 46 articles have been presented in more detail The research found that the neural network (CNN) is the most prevalent agent when talking about deep learning and medical image analysis. In addition, from the findings of this article, it can be observed that the application of widespread learning technology is widespread. But most of the applications that focus on bioinformatics, medical diagnostics and other similar fields. In this work, we examine the strength of the deep learning method for pathological examination in chest radiography. Convolutional neural networks (CNN) The method of deep architectural classification is popular due to the ability to learn to represent medium and high level images. We explore CNN's ability to identify different types of diseases in chest X-ray images. Moreover, because of the very large training sets that are not available in the medical domain, we therefore explore the possibility of using deep learning methods based on non-medical learning. We tested our algorithm on 93 datasets. We use CNN that is trained with ImageNet, which is a well-known non-animated large image database. The best performance is due to the use of features pulled from CNN and low-level features.","downloadable_attachments":[{"id":63976940,"asset_id":43674906,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":118928009,"first_name":"Vrushali","last_name":"Dhanokar","domain_name":"unipune","page_name":"vrushalidhanokar","display_name":"Vrushali Dhanokar","profile_url":"https://unipune.academia.edu/vrushalidhanokar?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true},{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=1185","nofollow":true},{"id":54123,"name":"Artificial Neural Networks","url":"https://www.academia.edu/Documents/in/Artificial_Neural_Networks?f_ri=1185"},{"id":81182,"name":"Deep Learning","url":"https://www.academia.edu/Documents/in/Deep_Learning?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_39075099" data-work_id="39075099" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/39075099/The_LASSO_Risk_for_Gaussian_Matrices">The LASSO Risk for Gaussian Matrices</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">We consider the problem of learning a coefficient vector x 0 ∈ R N from noisy linear observation y = Ax 0 + w ∈ R n . In many contexts (ranging from model selection to image processing) it is desirable to construct a sparse estimator x.... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_39075099" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">We consider the problem of learning a coefficient vector x 0 ∈ R N from noisy linear observation y = Ax 0 + w ∈ R n . In many contexts (ranging from model selection to image processing) it is desirable to construct a sparse estimator x. In this case, a popular approach consists in solving an ℓ 1 -penalized least squares problem known as the LASSO or Basis Pursuit DeNoising (BPDN).</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/39075099" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="2138e97dc9f3f2c72b50dfeab8748388" rel="nofollow" data-download="{&quot;attachment_id&quot;:59191120,&quot;asset_id&quot;:39075099,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/59191120/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="112410438" href="https://independent.academia.edu/MohsenBayati1">Mohsen Bayati</a><script data-card-contents-for-user="112410438" type="text/json">{"id":112410438,"first_name":"Mohsen","last_name":"Bayati","domain_name":"independent","page_name":"MohsenBayati1","display_name":"Mohsen Bayati","profile_url":"https://independent.academia.edu/MohsenBayati1?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_39075099 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="39075099"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 39075099, container: ".js-paper-rank-work_39075099", }); });</script></li><li class="js-percentile-work_39075099 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 39075099; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_39075099"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_39075099 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="39075099"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 39075099; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=39075099]").text(description); $(".js-view-count-work_39075099").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_39075099").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="39075099"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">13</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1410" rel="nofollow" href="https://www.academia.edu/Documents/in/Information_Theory">Information Theory</a>,&nbsp;<script data-card-contents-for-ri="1410" type="text/json">{"id":1410,"name":"Information Theory","url":"https://www.academia.edu/Documents/in/Information_Theory?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="7909" rel="nofollow" href="https://www.academia.edu/Documents/in/Compressed_Sensing">Compressed Sensing</a>,&nbsp;<script data-card-contents-for-ri="7909" type="text/json">{"id":7909,"name":"Compressed Sensing","url":"https://www.academia.edu/Documents/in/Compressed_Sensing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="38246" rel="nofollow" href="https://www.academia.edu/Documents/in/Statistical_Learning">Statistical Learning</a><script data-card-contents-for-ri="38246" type="text/json">{"id":38246,"name":"Statistical Learning","url":"https://www.academia.edu/Documents/in/Statistical_Learning?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=39075099]'), work: {"id":39075099,"title":"The LASSO Risk for Gaussian Matrices","created_at":"2019-05-09T11:04:47.342-07:00","url":"https://www.academia.edu/39075099/The_LASSO_Risk_for_Gaussian_Matrices?f_ri=1185","dom_id":"work_39075099","summary":"We consider the problem of learning a coefficient vector x 0 ∈ R N from noisy linear observation y = Ax 0 + w ∈ R n . In many contexts (ranging from model selection to image processing) it is desirable to construct a sparse estimator x. In this case, a popular approach consists in solving an ℓ 1 -penalized least squares problem known as the LASSO or Basis Pursuit DeNoising (BPDN).","downloadable_attachments":[{"id":59191120,"asset_id":39075099,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":112410438,"first_name":"Mohsen","last_name":"Bayati","domain_name":"independent","page_name":"MohsenBayati1","display_name":"Mohsen Bayati","profile_url":"https://independent.academia.edu/MohsenBayati1?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1410,"name":"Information Theory","url":"https://www.academia.edu/Documents/in/Information_Theory?f_ri=1185","nofollow":true},{"id":7909,"name":"Compressed Sensing","url":"https://www.academia.edu/Documents/in/Compressed_Sensing?f_ri=1185","nofollow":true},{"id":38246,"name":"Statistical Learning","url":"https://www.academia.edu/Documents/in/Statistical_Learning?f_ri=1185","nofollow":true},{"id":77932,"name":"Mean square error","url":"https://www.academia.edu/Documents/in/Mean_square_error?f_ri=1185"},{"id":85344,"name":"Model Selection","url":"https://www.academia.edu/Documents/in/Model_Selection?f_ri=1185"},{"id":116787,"name":"Algorithm Design","url":"https://www.academia.edu/Documents/in/Algorithm_Design?f_ri=1185"},{"id":137277,"name":"Random Matrix Theory","url":"https://www.academia.edu/Documents/in/Random_Matrix_Theory?f_ri=1185"},{"id":149059,"name":"Efficient Algorithm for ECG Coding","url":"https://www.academia.edu/Documents/in/Efficient_Algorithm_for_ECG_Coding?f_ri=1185"},{"id":886652,"name":"Graphical Model","url":"https://www.academia.edu/Documents/in/Graphical_Model?f_ri=1185"},{"id":912908,"name":"Noise Measurement","url":"https://www.academia.edu/Documents/in/Noise_Measurement?f_ri=1185"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering?f_ri=1185"},{"id":1839517,"name":"Basis Pursuit","url":"https://www.academia.edu/Documents/in/Basis_Pursuit?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_34702407" data-work_id="34702407" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/34702407/Detection_and_Inpainting_of_Facial_Wrinkles">Detection and Inpainting of Facial Wrinkles</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><a href="https://www.irjet.net/archives/V3/i5/IRJET-V3I5183.pdf" rel="nofollow">https://www.irjet.net/archives/V3/i5/IRJET-V3I5183.pdf</a></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/34702407" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="6e5a7a59d8165844ba31213032a1dad1" rel="nofollow" data-download="{&quot;attachment_id&quot;:54560181,&quot;asset_id&quot;:34702407,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/54560181/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="31493941" href="https://irjet.academia.edu/IRJET">IRJET Journal</a><script data-card-contents-for-user="31493941" type="text/json">{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}</script></span></span></li><li class="js-paper-rank-work_34702407 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="34702407"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 34702407, container: ".js-paper-rank-work_34702407", }); });</script></li><li class="js-percentile-work_34702407 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 34702407; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_34702407"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_34702407 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="34702407"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 34702407; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=34702407]").text(description); $(".js-view-count-work_34702407").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_34702407").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="34702407"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=34702407]'), work: {"id":34702407,"title":"Detection and Inpainting of Facial Wrinkles","created_at":"2017-09-28T01:23:52.608-07:00","url":"https://www.academia.edu/34702407/Detection_and_Inpainting_of_Facial_Wrinkles?f_ri=1185","dom_id":"work_34702407","summary":"https://www.irjet.net/archives/V3/i5/IRJET-V3I5183.pdf","downloadable_attachments":[{"id":54560181,"asset_id":34702407,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":31493941,"first_name":"IRJET","last_name":"Journal","domain_name":"irjet","page_name":"IRJET","display_name":"IRJET Journal","profile_url":"https://irjet.academia.edu/IRJET?f_ri=1185","photo":"https://0.academia-photos.com/31493941/9304077/11813823/s65_irjet.journal.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_2496048" data-work_id="2496048" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/2496048/Segmentation_of_the_Biliary_Tree_in_MRCP_Data">Segmentation of the Biliary Tree in MRCP Data</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Magnetic Resonance Cholangiopancreatography (MRCP) is a type of MR imaging which utilises protocols designed to enhance stationary fluids in the imaged volume. In this way it visualises the pancreatobiliary tract by highlighting the bile... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_2496048" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Magnetic Resonance Cholangiopancreatography (MRCP) is a type of MR imaging which utilises protocols designed to enhance stationary fluids in the imaged volume. In this way it visualises the pancreatobiliary tract by highlighting the bile and pancreatic juices in the system. Current practice sees this data being assessed directly, with little or no processing being performed prior to review. MRCP data presents three main difficulties when it comes to image processing. The first is the relatively noisy nature of the data. Second is its low spatial resolution, especially in the inter-slice direction. And third, the variability observed between MRCP studies, which makes consistent results difficult to attain. This paper describes the initial phase of research which aims to develop assistive image analysis techniques to aid in the interpretation of MRCP data. The first stage in this process is the robust segmentation of the pancreatobiliary system. To this end a segmentation procedure has been developed using an approach based on the tools and techniques of the mathematical morphology. This paper examines the task at hand and presents initial results, describing and assessing the segmentation approach developed.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/2496048" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="5d6abfe6e6935df8a1bbed249de959d3" rel="nofollow" data-download="{&quot;attachment_id&quot;:50599993,&quot;asset_id&quot;:2496048,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/50599993/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="3253327" href="https://dcu.academia.edu/PaulWhelan">Paul Whelan</a><script data-card-contents-for-user="3253327" type="text/json">{"id":3253327,"first_name":"Paul","last_name":"Whelan","domain_name":"dcu","page_name":"PaulWhelan","display_name":"Paul Whelan","profile_url":"https://dcu.academia.edu/PaulWhelan?f_ri=1185","photo":"https://0.academia-photos.com/3253327/1075022/1341287/s65_paul.whelan.jpg"}</script></span></span></li><li class="js-paper-rank-work_2496048 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="2496048"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 2496048, container: ".js-paper-rank-work_2496048", }); });</script></li><li class="js-percentile-work_2496048 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 2496048; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_2496048"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_2496048 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="2496048"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 2496048; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=2496048]").text(description); $(".js-view-count-work_2496048").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_2496048").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="2496048"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">3</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1122411" rel="nofollow" href="https://www.academia.edu/Documents/in/Mr_Imaging">Mr Imaging</a>,&nbsp;<script data-card-contents-for-ri="1122411" type="text/json">{"id":1122411,"name":"Mr Imaging","url":"https://www.academia.edu/Documents/in/Mr_Imaging?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2405994" rel="nofollow" href="https://www.academia.edu/Documents/in/Magnetic_Resonance_Cholangiopancreatography">Magnetic Resonance Cholangiopancreatography</a><script data-card-contents-for-ri="2405994" type="text/json">{"id":2405994,"name":"Magnetic Resonance Cholangiopancreatography","url":"https://www.academia.edu/Documents/in/Magnetic_Resonance_Cholangiopancreatography?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=2496048]'), work: {"id":2496048,"title":"Segmentation of the Biliary Tree in MRCP Data","created_at":"2013-01-31T20:48:50.918-08:00","url":"https://www.academia.edu/2496048/Segmentation_of_the_Biliary_Tree_in_MRCP_Data?f_ri=1185","dom_id":"work_2496048","summary":"Magnetic Resonance Cholangiopancreatography (MRCP) is a type of MR imaging which utilises protocols designed to enhance stationary fluids in the imaged volume. In this way it visualises the pancreatobiliary tract by highlighting the bile and pancreatic juices in the system. Current practice sees this data being assessed directly, with little or no processing being performed prior to review. MRCP data presents three main difficulties when it comes to image processing. The first is the relatively noisy nature of the data. Second is its low spatial resolution, especially in the inter-slice direction. And third, the variability observed between MRCP studies, which makes consistent results difficult to attain. This paper describes the initial phase of research which aims to develop assistive image analysis techniques to aid in the interpretation of MRCP data. The first stage in this process is the robust segmentation of the pancreatobiliary system. To this end a segmentation procedure has been developed using an approach based on the tools and techniques of the mathematical morphology. This paper examines the task at hand and presents initial results, describing and assessing the segmentation approach developed.","downloadable_attachments":[{"id":50599993,"asset_id":2496048,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":3253327,"first_name":"Paul","last_name":"Whelan","domain_name":"dcu","page_name":"PaulWhelan","display_name":"Paul Whelan","profile_url":"https://dcu.academia.edu/PaulWhelan?f_ri=1185","photo":"https://0.academia-photos.com/3253327/1075022/1341287/s65_paul.whelan.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1122411,"name":"Mr Imaging","url":"https://www.academia.edu/Documents/in/Mr_Imaging?f_ri=1185","nofollow":true},{"id":2405994,"name":"Magnetic Resonance Cholangiopancreatography","url":"https://www.academia.edu/Documents/in/Magnetic_Resonance_Cholangiopancreatography?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_27583121" data-work_id="27583121" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/27583121/A_Review_of_Biometric_Identification_In_Signal_Processing">A Review of Biometric Identification In Signal Processing</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">This paper describes person identity by fingerprint , Face recognition, and voice information using Bio-metrics tool. The person is modeled by their features using Gaussian Mixture Model (GMM). Bio metrics is frequently used in signal... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_27583121" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">This paper describes person identity by fingerprint , Face recognition, and voice information using Bio-metrics tool. The person is modeled by their features using Gaussian Mixture Model (GMM). Bio metrics is frequently used in signal processing applications. Thus, we concentrated on the methodology of biometrics for person identification which is useful in industrial and military security systems. The statistical values are measured by GMM in pattern recognition, Face recognition, and voice Recognition. These statistical values will helps for modeling of person using Bio-metric technique. The voice features are mapped into a Mel-Frequency-Cepstral-Coefficients (MFCC) form. The process of indentifying a person using MFCC data is described in this paper.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/27583121" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="7f7cdca336f878c981a7679f08b4acd2" rel="nofollow" data-download="{&quot;attachment_id&quot;:47841659,&quot;asset_id&quot;:27583121,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/47841659/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="7371781" href="https://independent.academia.edu/irpublications">Innovative Research Publications</a><script data-card-contents-for-user="7371781" type="text/json">{"id":7371781,"first_name":"Innovative Research","last_name":"Publications","domain_name":"independent","page_name":"irpublications","display_name":"Innovative Research Publications","profile_url":"https://independent.academia.edu/irpublications?f_ri=1185","photo":"https://0.academia-photos.com/7371781/2714820/18927320/s65_innovative_research.publications.jpg"}</script></span></span></li><li class="js-paper-rank-work_27583121 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="27583121"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 27583121, container: ".js-paper-rank-work_27583121", }); });</script></li><li class="js-percentile-work_27583121 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 27583121; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_27583121"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_27583121 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="27583121"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 27583121; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=27583121]").text(description); $(".js-view-count-work_27583121").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_27583121").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="27583121"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i></div><span class="InlineList-item-text u-textTruncate u-pl6x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a><script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (false) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=27583121]'), work: {"id":27583121,"title":"A Review of Biometric Identification In Signal Processing","created_at":"2016-08-06T10:33:10.136-07:00","url":"https://www.academia.edu/27583121/A_Review_of_Biometric_Identification_In_Signal_Processing?f_ri=1185","dom_id":"work_27583121","summary":"This paper describes person identity by fingerprint , Face recognition, and voice information using Bio-metrics tool. The person is modeled by their features using Gaussian Mixture Model (GMM). Bio metrics is frequently used in signal processing applications. Thus, we concentrated on the methodology of biometrics for person identification which is useful in industrial and military security systems. The statistical values are measured by GMM in pattern recognition, Face recognition, and voice Recognition. These statistical values will helps for modeling of person using Bio-metric technique. The voice features are mapped into a Mel-Frequency-Cepstral-Coefficients (MFCC) form. The process of indentifying a person using MFCC data is described in this paper.","downloadable_attachments":[{"id":47841659,"asset_id":27583121,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":7371781,"first_name":"Innovative Research","last_name":"Publications","domain_name":"independent","page_name":"irpublications","display_name":"Innovative Research Publications","profile_url":"https://independent.academia.edu/irpublications?f_ri=1185","photo":"https://0.academia-photos.com/7371781/2714820/18927320/s65_innovative_research.publications.jpg"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_26413505 coauthored" data-work_id="26413505" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/26413505/Application_of_Landsat_imagery_to_verify_near_shore_water_circulation_of_Saronikos_gulf_as_resulted_from_numerical_modelling">Application of Landsat imagery, to verify near shore water circulation of Saronikos gulf, as resulted from numerical modelling</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Hydrodynamic numerical models nowadays offer great aid when studying the physical behavior of coastal waters. However, the calibration and furthermore the verification of such models, is a difficult process that should be applied... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_26413505" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Hydrodynamic numerical models nowadays offer great aid when studying the physical behavior of coastal waters. However, the calibration and furthermore the verification of such models, is a difficult process that should be applied carefully, especially when little direct hydrodynamic data has been collected from the study area. This paper suggests a methodology for indirect verification of mathematical coastal circulation models, by means of image processing and interpretation of Landsat&#39;s TM reflective bands 3 and 6 imagery. This approach is an efficient practice and is easily available even though no field survey was carried out. This method was successfully applied to Saronikos Gulf and in particular to gulf of Megara, located westwards of Salamis Island, where the dispersion of a plume formed by urban waste waters was examined. The satellite data processing derivatives were found to be in accordance with the results of the circulation simulation and thus the parameters used for the calibration of the model can be considered successfully selected.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/26413505" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="f57e7f4694493dbe7e6a46b2488a509d" rel="nofollow" data-download="{&quot;attachment_id&quot;:46711658,&quot;asset_id&quot;:26413505,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/46711658/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="50415994" href="https://independent.academia.edu/EFakiris">E. Fakiris</a><script data-card-contents-for-user="50415994" type="text/json">{"id":50415994,"first_name":"E.","last_name":"Fakiris","domain_name":"independent","page_name":"EFakiris","display_name":"E. Fakiris","profile_url":"https://independent.academia.edu/EFakiris?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-26413505">+1</span><div class="hidden js-additional-users-26413505"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://upatras.academia.edu/IZacharias">I. Zacharias</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-26413505'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-26413505').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_26413505 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="26413505"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 26413505, container: ".js-paper-rank-work_26413505", }); });</script></li><li class="js-percentile-work_26413505 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 26413505; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_26413505"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_26413505 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="26413505"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 26413505; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=26413505]").text(description); $(".js-view-count-work_26413505").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_26413505").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="26413505"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">3</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="237" rel="nofollow" href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a>,&nbsp;<script data-card-contents-for-ri="237" type="text/json">{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="117397" rel="nofollow" href="https://www.academia.edu/Documents/in/Verification">Verification</a><script data-card-contents-for-ri="117397" type="text/json">{"id":117397,"name":"Verification","url":"https://www.academia.edu/Documents/in/Verification?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=26413505]'), work: {"id":26413505,"title":"Application of Landsat imagery, to verify near shore water circulation of Saronikos gulf, as resulted from numerical modelling","created_at":"2016-06-22T10:39:13.957-07:00","url":"https://www.academia.edu/26413505/Application_of_Landsat_imagery_to_verify_near_shore_water_circulation_of_Saronikos_gulf_as_resulted_from_numerical_modelling?f_ri=1185","dom_id":"work_26413505","summary":"Hydrodynamic numerical models nowadays offer great aid when studying the physical behavior of coastal waters. However, the calibration and furthermore the verification of such models, is a difficult process that should be applied carefully, especially when little direct hydrodynamic data has been collected from the study area. This paper suggests a methodology for indirect verification of mathematical coastal circulation models, by means of image processing and interpretation of Landsat's TM reflective bands 3 and 6 imagery. This approach is an efficient practice and is easily available even though no field survey was carried out. This method was successfully applied to Saronikos Gulf and in particular to gulf of Megara, located westwards of Salamis Island, where the dispersion of a plume formed by urban waste waters was examined. The satellite data processing derivatives were found to be in accordance with the results of the circulation simulation and thus the parameters used for the calibration of the model can be considered successfully selected.","downloadable_attachments":[{"id":46711658,"asset_id":26413505,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":50415994,"first_name":"E.","last_name":"Fakiris","domain_name":"independent","page_name":"EFakiris","display_name":"E. Fakiris","profile_url":"https://independent.academia.edu/EFakiris?f_ri=1185","photo":"/images/s65_no_pic.png"},{"id":50301651,"first_name":"I.","last_name":"Zacharias","domain_name":"upatras","page_name":"IZacharias","display_name":"I. Zacharias","profile_url":"https://upatras.academia.edu/IZacharias?f_ri=1185","photo":"https://0.academia-photos.com/50301651/13226051/14508101/s65_i..zacharias.jpg"}],"research_interests":[{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":117397,"name":"Verification","url":"https://www.academia.edu/Documents/in/Verification?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_14673075" data-work_id="14673075" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/14673075/An_extensible_infrastructure_for_processing_distributed_geospatial_data_streams">An extensible infrastructure for processing distributed geospatial data streams</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Although the processing of data streams has been the focus of many research efforts in several areas, the case of remotely sensed streams in scientific contexts has received little attention. We present an extensible architecture to... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_14673075" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Although the processing of data streams has been the focus of many research efforts in several areas, the case of remotely sensed streams in scientific contexts has received little attention. We present an extensible architecture to compose streaming image processing pipelines spanning multiple nodes on a network using a scientific workflow approach. This architecture includes (i) a mechanism for stream query dispatching so new streams can be dynamically generated from within individual processing nodes as a result of local or remote requests, and (ii) a mechanism for making the resulting streams externally available. As complete processing image pipelines can be cascaded across multiple interconnected nodes in a dynamic, scientist-driven way, the approach facilitates the reuse of data and the scalability of computations. We demonstrate the advantages of our infrastructure with a toolset of stream operators acting on remotely sensed data streams for realtime change detection.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/14673075" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="a907406b85afb6368a53ba01b26cc949" rel="nofollow" data-download="{&quot;attachment_id&quot;:43992382,&quot;asset_id&quot;:14673075,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/43992382/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="33618708" href="https://ucdavis.academia.edu/BerndHamann">Bernd Hamann</a><script data-card-contents-for-user="33618708" type="text/json">{"id":33618708,"first_name":"Bernd","last_name":"Hamann","domain_name":"ucdavis","page_name":"BerndHamann","display_name":"Bernd Hamann","profile_url":"https://ucdavis.academia.edu/BerndHamann?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_14673075 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="14673075"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 14673075, container: ".js-paper-rank-work_14673075", }); });</script></li><li class="js-percentile-work_14673075 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 14673075; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_14673075"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_14673075 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="14673075"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 14673075; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=14673075]").text(description); $(".js-view-count-work_14673075").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_14673075").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="14673075"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1252" rel="nofollow" href="https://www.academia.edu/Documents/in/Remote_Sensing">Remote Sensing</a>,&nbsp;<script data-card-contents-for-ri="1252" type="text/json">{"id":1252,"name":"Remote Sensing","url":"https://www.academia.edu/Documents/in/Remote_Sensing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="84291" rel="nofollow" href="https://www.academia.edu/Documents/in/Change_detection">Change detection</a>,&nbsp;<script data-card-contents-for-ri="84291" type="text/json">{"id":84291,"name":"Change detection","url":"https://www.academia.edu/Documents/in/Change_detection?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="229390" rel="nofollow" href="https://www.academia.edu/Documents/in/Real_Time">Real Time</a><script data-card-contents-for-ri="229390" type="text/json">{"id":229390,"name":"Real Time","url":"https://www.academia.edu/Documents/in/Real_Time?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=14673075]'), work: {"id":14673075,"title":"An extensible infrastructure for processing distributed geospatial data streams","created_at":"2015-08-04T18:13:21.672-07:00","url":"https://www.academia.edu/14673075/An_extensible_infrastructure_for_processing_distributed_geospatial_data_streams?f_ri=1185","dom_id":"work_14673075","summary":"Although the processing of data streams has been the focus of many research efforts in several areas, the case of remotely sensed streams in scientific contexts has received little attention. We present an extensible architecture to compose streaming image processing pipelines spanning multiple nodes on a network using a scientific workflow approach. This architecture includes (i) a mechanism for stream query dispatching so new streams can be dynamically generated from within individual processing nodes as a result of local or remote requests, and (ii) a mechanism for making the resulting streams externally available. As complete processing image pipelines can be cascaded across multiple interconnected nodes in a dynamic, scientist-driven way, the approach facilitates the reuse of data and the scalability of computations. We demonstrate the advantages of our infrastructure with a toolset of stream operators acting on remotely sensed data streams for realtime change detection.","downloadable_attachments":[{"id":43992382,"asset_id":14673075,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":33618708,"first_name":"Bernd","last_name":"Hamann","domain_name":"ucdavis","page_name":"BerndHamann","display_name":"Bernd Hamann","profile_url":"https://ucdavis.academia.edu/BerndHamann?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1252,"name":"Remote Sensing","url":"https://www.academia.edu/Documents/in/Remote_Sensing?f_ri=1185","nofollow":true},{"id":84291,"name":"Change detection","url":"https://www.academia.edu/Documents/in/Change_detection?f_ri=1185","nofollow":true},{"id":229390,"name":"Real Time","url":"https://www.academia.edu/Documents/in/Real_Time?f_ri=1185","nofollow":true},{"id":447972,"name":"Scientific Workflow","url":"https://www.academia.edu/Documents/in/Scientific_Workflow?f_ri=1185"},{"id":574000,"name":"THERMAL INFRARED REMOTE SENSING DATA","url":"https://www.academia.edu/Documents/in/THERMAL_INFRARED_REMOTE_SENSING_DATA?f_ri=1185"},{"id":575715,"name":"Geospatial Data","url":"https://www.academia.edu/Documents/in/Geospatial_Data?f_ri=1185"},{"id":1975886,"name":"Data Stream","url":"https://www.academia.edu/Documents/in/Data_Stream?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_23428963 coauthored" data-work_id="23428963" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/23428963/New_opportunities_of_pegmatites_enrichment_by_optical_sorting">New opportunities of pegmatites enrichment by optical sorting</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The paper presents the research results of pegmatites from Karelian deposits. The aim of this research was to find selective features of microcline, biotite, muscovite, quartz, and plagioclase for determining the opportunity of their... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_23428963" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The paper presents the research results of pegmatites from Karelian deposits. The aim of this research was to find<br />selective features of microcline, biotite, muscovite, quartz, and plagioclase for determining the opportunity of their<br />selection from original ore by optical sorting method which based on color differences of analyzed objects. Studies have<br />shown that the solution of the problem of these minerals separation is possible in 3 stages. In the first stage groups<br />&quot;microcline&quot;, &quot;muscovite and biotite&quot;, &quot;quartz and plagioclase,&quot; are separated according to the values of channels hue H<br />and lightness L in the color model HLS. In the second stage biotite and muscovite are separated from each other by the<br />values of the channel hue H and saturation S. Finally, in the third stage couple &quot;quartz - plagioclase&quot; are separated. But<br />these minerals are indistinguishable from each other by color, so it&#39;s proposed to separate them by selective feature<br />&quot;surface structure&quot;.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/23428963" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="4a253220da02abdd1d0b39de979d062f" rel="nofollow" data-download="{&quot;attachment_id&quot;:43870332,&quot;asset_id&quot;:23428963,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/43870332/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="44370594" href="https://ifmo.academia.edu/ArtemAlekhin">Artem Alekhin</a><script data-card-contents-for-user="44370594" type="text/json">{"id":44370594,"first_name":"Artem","last_name":"Alekhin","domain_name":"ifmo","page_name":"ArtemAlekhin","display_name":"Artem Alekhin","profile_url":"https://ifmo.academia.edu/ArtemAlekhin?f_ri=1185","photo":"https://0.academia-photos.com/44370594/12045371/13419171/s65_artem.alekhin.jpg"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-23428963">+1</span><div class="hidden js-additional-users-23428963"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://independent.academia.edu/ElenaVGorbunova">Elena V. Gorbunova</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-23428963'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-23428963').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_23428963 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="23428963"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 23428963, container: ".js-paper-rank-work_23428963", }); });</script></li><li class="js-percentile-work_23428963 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 23428963; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_23428963"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_23428963 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="23428963"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 23428963; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=23428963]").text(description); $(".js-view-count-work_23428963").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_23428963").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="23428963"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">4</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="516" rel="nofollow" href="https://www.academia.edu/Documents/in/Optics">Optics</a>,&nbsp;<script data-card-contents-for-ri="516" type="text/json">{"id":516,"name":"Optics","url":"https://www.academia.edu/Documents/in/Optics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="18496" rel="nofollow" href="https://www.academia.edu/Documents/in/Mining">Mining</a>,&nbsp;<script data-card-contents-for-ri="18496" type="text/json">{"id":18496,"name":"Mining","url":"https://www.academia.edu/Documents/in/Mining?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1969853" rel="nofollow" href="https://www.academia.edu/Documents/in/Optical_Sorting">Optical Sorting</a><script data-card-contents-for-ri="1969853" type="text/json">{"id":1969853,"name":"Optical Sorting","url":"https://www.academia.edu/Documents/in/Optical_Sorting?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=23428963]'), work: {"id":23428963,"title":"New opportunities of pegmatites enrichment by optical sorting","created_at":"2016-03-18T14:53:40.542-07:00","url":"https://www.academia.edu/23428963/New_opportunities_of_pegmatites_enrichment_by_optical_sorting?f_ri=1185","dom_id":"work_23428963","summary":"The paper presents the research results of pegmatites from Karelian deposits. The aim of this research was to find\nselective features of microcline, biotite, muscovite, quartz, and plagioclase for determining the opportunity of their\nselection from original ore by optical sorting method which based on color differences of analyzed objects. Studies have\nshown that the solution of the problem of these minerals separation is possible in 3 stages. In the first stage groups\n\"microcline\", \"muscovite and biotite\", \"quartz and plagioclase,\" are separated according to the values of channels hue H\nand lightness L in the color model HLS. In the second stage biotite and muscovite are separated from each other by the\nvalues of the channel hue H and saturation S. Finally, in the third stage couple \"quartz - plagioclase\" are separated. But\nthese minerals are indistinguishable from each other by color, so it's proposed to separate them by selective feature\n\"surface structure\".","downloadable_attachments":[{"id":43870332,"asset_id":23428963,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":44370594,"first_name":"Artem","last_name":"Alekhin","domain_name":"ifmo","page_name":"ArtemAlekhin","display_name":"Artem Alekhin","profile_url":"https://ifmo.academia.edu/ArtemAlekhin?f_ri=1185","photo":"https://0.academia-photos.com/44370594/12045371/13419171/s65_artem.alekhin.jpg"},{"id":32556781,"first_name":"Elena V.","last_name":"Gorbunova","domain_name":"independent","page_name":"ElenaVGorbunova","display_name":"Elena V. Gorbunova","profile_url":"https://independent.academia.edu/ElenaVGorbunova?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":516,"name":"Optics","url":"https://www.academia.edu/Documents/in/Optics?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":18496,"name":"Mining","url":"https://www.academia.edu/Documents/in/Mining?f_ri=1185","nofollow":true},{"id":1969853,"name":"Optical Sorting","url":"https://www.academia.edu/Documents/in/Optical_Sorting?f_ri=1185","nofollow":true}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_22269310 coauthored" data-work_id="22269310" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/22269310/_title_Visual_enhancement_of_micro_CT_bone_density_images_title_">&lt;title&gt;Visual enhancement of micro CT bone density images&lt;/title&gt;</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">The primary goal of this research was to provide image processing support to aid in the identification of those subjects most affected by bone loss when exposed to weightlessness and provide insight into the causes for large variability.... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_22269310" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">The primary goal of this research was to provide image processing support to aid in the identification of those subjects most affected by bone loss when exposed to weightlessness and provide insight into the causes for large variability. Past research has demonstrated that genetically distinct strains of mice exhibit different degrees of bone loss when subjected to simulated weightlessness. Bone loss is quantified by in vivo computed tomography (CT) imaging. The first step in evaluating bone density is to segment gray scale images into separate regions of bone and background. Two of the most common methods for implementing image segmentation are thresholding and edge detection. Thresholding is generally considered the simplest segmentation process which can be obtained by having a user visually select a threshold using a sliding scale. This is a highly subjective process with great potential for variation from one observer to another. One way to reduce inter-observer variability is to have several users independently set the threshold and average their results but this is a very time consuming process. A better approach is to apply an objective adaptive technique such as the Riddler / Calvard method. In our study we have concluded that thresholding was better than edge detection and pre-processing these images with an iterative deconvolution algorithm prior to adaptive thresholding yields superior visualization when compared with images that have not been pre-processed or images that have been pre-processed with a filter.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/22269310" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="8436003e9bf4f6bab8d98b94074d4fda" rel="nofollow" data-download="{&quot;attachment_id&quot;:42918594,&quot;asset_id&quot;:22269310,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/42918594/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="43658811" href="https://independent.academia.edu/CTirrell">Charles Tirrell</a><script data-card-contents-for-user="43658811" type="text/json">{"id":43658811,"first_name":"Charles","last_name":"Tirrell","domain_name":"independent","page_name":"CTirrell","display_name":"Charles Tirrell","profile_url":"https://independent.academia.edu/CTirrell?f_ri=1185","photo":"https://gravatar.com/avatar/11b6cee12d4460a9dc8c29a39414b915?s=65"}</script></span></span><span class="u-displayInlineBlock InlineList-item-text">&nbsp;and&nbsp;<span class="u-textDecorationUnderline u-clickable InlineList-item-text js-work-more-authors-22269310">+1</span><div class="hidden js-additional-users-22269310"><div><span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a href="https://independent.academia.edu/RebeccaKamins">Rebecca Kamins</a></span></div></div></span><script>(function(){ var popoverSettings = { el: $('.js-work-more-authors-22269310'), placement: 'bottom', hide_delay: 200, html: true, content: function(){ return $('.js-additional-users-22269310').html(); } } new HoverPopover(popoverSettings); })();</script></li><li class="js-paper-rank-work_22269310 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="22269310"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 22269310, container: ".js-paper-rank-work_22269310", }); });</script></li><li class="js-percentile-work_22269310 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 22269310; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_22269310"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_22269310 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="22269310"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 22269310; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=22269310]").text(description); $(".js-view-count-work_22269310").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_22269310").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="22269310"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">8</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="156" rel="nofollow" href="https://www.academia.edu/Documents/in/Genetics">Genetics</a>,&nbsp;<script data-card-contents-for-ri="156" type="text/json">{"id":156,"name":"Genetics","url":"https://www.academia.edu/Documents/in/Genetics?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1648" rel="nofollow" href="https://www.academia.edu/Documents/in/Computed_Tomography">Computed Tomography</a>,&nbsp;<script data-card-contents-for-ri="1648" type="text/json">{"id":1648,"name":"Computed Tomography","url":"https://www.academia.edu/Documents/in/Computed_Tomography?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="26870" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_segmentation">Image segmentation</a><script data-card-contents-for-ri="26870" type="text/json">{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=22269310]'), work: {"id":22269310,"title":"\u003ctitle\u003eVisual enhancement of micro CT bone density images\u003c/title\u003e","created_at":"2016-02-21T18:20:53.994-08:00","url":"https://www.academia.edu/22269310/_title_Visual_enhancement_of_micro_CT_bone_density_images_title_?f_ri=1185","dom_id":"work_22269310","summary":"The primary goal of this research was to provide image processing support to aid in the identification of those subjects most affected by bone loss when exposed to weightlessness and provide insight into the causes for large variability. Past research has demonstrated that genetically distinct strains of mice exhibit different degrees of bone loss when subjected to simulated weightlessness. Bone loss is quantified by in vivo computed tomography (CT) imaging. The first step in evaluating bone density is to segment gray scale images into separate regions of bone and background. Two of the most common methods for implementing image segmentation are thresholding and edge detection. Thresholding is generally considered the simplest segmentation process which can be obtained by having a user visually select a threshold using a sliding scale. This is a highly subjective process with great potential for variation from one observer to another. One way to reduce inter-observer variability is to have several users independently set the threshold and average their results but this is a very time consuming process. A better approach is to apply an objective adaptive technique such as the Riddler / Calvard method. In our study we have concluded that thresholding was better than edge detection and pre-processing these images with an iterative deconvolution algorithm prior to adaptive thresholding yields superior visualization when compared with images that have not been pre-processed or images that have been pre-processed with a filter.","downloadable_attachments":[{"id":42918594,"asset_id":22269310,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":43658811,"first_name":"Charles","last_name":"Tirrell","domain_name":"independent","page_name":"CTirrell","display_name":"Charles Tirrell","profile_url":"https://independent.academia.edu/CTirrell?f_ri=1185","photo":"https://gravatar.com/avatar/11b6cee12d4460a9dc8c29a39414b915?s=65"},{"id":43933808,"first_name":"Rebecca","last_name":"Kamins","domain_name":"independent","page_name":"RebeccaKamins","display_name":"Rebecca Kamins","profile_url":"https://independent.academia.edu/RebeccaKamins?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":156,"name":"Genetics","url":"https://www.academia.edu/Documents/in/Genetics?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":1648,"name":"Computed Tomography","url":"https://www.academia.edu/Documents/in/Computed_Tomography?f_ri=1185","nofollow":true},{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true},{"id":81788,"name":"Edge Detection","url":"https://www.academia.edu/Documents/in/Edge_Detection?f_ri=1185"},{"id":139532,"name":"Bone Density","url":"https://www.academia.edu/Documents/in/Bone_Density?f_ri=1185"},{"id":516544,"name":"Bone Loss","url":"https://www.academia.edu/Documents/in/Bone_Loss?f_ri=1185"},{"id":1241310,"name":"Adaptive Thresholding","url":"https://www.academia.edu/Documents/in/Adaptive_Thresholding?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_17065130" data-work_id="17065130" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/17065130/Feature_extraction_on_vineyard_by_Gustafson_Kessel_FCM_and_K_means">Feature extraction on vineyard by Gustafson Kessel FCM and K-means</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">Image segmentation is a process by which an image is partitioned into regions with similar features. Many approaches have been proposed for color images segmentation, but Fuzzy C-Means has been widely used, because it has a good... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_17065130" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">Image segmentation is a process by which an image is partitioned into regions with similar features. Many approaches have been proposed for color images segmentation, but Fuzzy C-Means has been widely used, because it has a good performance in a wide class of images. However, it is not adequate for noisy images and it takes longer runtimes, as compared to other method like K-means. For this reason, several methods have been proposed to improve these weaknesses. Methods like Fuzzy C-Means with Gustafson-Kessel algorithm (FCM-GK), which improve its performance against the noise, but increase significantly the runtime. In this paper we propose to use the centroids generated by GK-FCM algorithms as seeding for K-means algorithm in order to accelerate the runtime and improve the performance of K-means with random seeding. These segmentation techniques were applied to feature extraction on vineyard images. Segmented images were evaluated using several quality parameters such as the rate of correctly classified area and runtime.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/17065130" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="39683844d8ea49a0ddc5f3e096dcd3bc" rel="nofollow" data-download="{&quot;attachment_id&quot;:42334765,&quot;asset_id&quot;:17065130,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/42334765/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="36541116" href="https://uinirioja.academia.edu/JavierTardaguila">Javier Tardaguila</a><script data-card-contents-for-user="36541116" type="text/json">{"id":36541116,"first_name":"Javier","last_name":"Tardaguila","domain_name":"uinirioja","page_name":"JavierTardaguila","display_name":"Javier Tardaguila","profile_url":"https://uinirioja.academia.edu/JavierTardaguila?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_17065130 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="17065130"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 17065130, container: ".js-paper-rank-work_17065130", }); });</script></li><li class="js-percentile-work_17065130 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17065130; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_17065130"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_17065130 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="17065130"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17065130; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17065130]").text(description); $(".js-view-count-work_17065130").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_17065130").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="17065130"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">12</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl10x"><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2008" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a>,&nbsp;<script data-card-contents-for-ri="2008" type="text/json">{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="2009" rel="nofollow" href="https://www.academia.edu/Documents/in/Data_Mining">Data Mining</a>,&nbsp;<script data-card-contents-for-ri="2009" type="text/json">{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="26870" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_segmentation">Image segmentation</a><script data-card-contents-for-ri="26870" type="text/json">{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=17065130]'), work: {"id":17065130,"title":"Feature extraction on vineyard by Gustafson Kessel FCM and K-means","created_at":"2015-10-20T14:20:56.658-07:00","url":"https://www.academia.edu/17065130/Feature_extraction_on_vineyard_by_Gustafson_Kessel_FCM_and_K_means?f_ri=1185","dom_id":"work_17065130","summary":"Image segmentation is a process by which an image is partitioned into regions with similar features. Many approaches have been proposed for color images segmentation, but Fuzzy C-Means has been widely used, because it has a good performance in a wide class of images. However, it is not adequate for noisy images and it takes longer runtimes, as compared to other method like K-means. For this reason, several methods have been proposed to improve these weaknesses. Methods like Fuzzy C-Means with Gustafson-Kessel algorithm (FCM-GK), which improve its performance against the noise, but increase significantly the runtime. In this paper we propose to use the centroids generated by GK-FCM algorithms as seeding for K-means algorithm in order to accelerate the runtime and improve the performance of K-means with random seeding. These segmentation techniques were applied to feature extraction on vineyard images. Segmented images were evaluated using several quality parameters such as the rate of correctly classified area and runtime.","downloadable_attachments":[{"id":42334765,"asset_id":17065130,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":36541116,"first_name":"Javier","last_name":"Tardaguila","domain_name":"uinirioja","page_name":"JavierTardaguila","display_name":"Javier Tardaguila","profile_url":"https://uinirioja.academia.edu/JavierTardaguila?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":2008,"name":"Machine Learning","url":"https://www.academia.edu/Documents/in/Machine_Learning?f_ri=1185","nofollow":true},{"id":2009,"name":"Data Mining","url":"https://www.academia.edu/Documents/in/Data_Mining?f_ri=1185","nofollow":true},{"id":26870,"name":"Image segmentation","url":"https://www.academia.edu/Documents/in/Image_segmentation?f_ri=1185","nofollow":true},{"id":155938,"name":"Color Image Segmentation","url":"https://www.academia.edu/Documents/in/Color_Image_Segmentation?f_ri=1185"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction?f_ri=1185"},{"id":173232,"name":"Vineyard","url":"https://www.academia.edu/Documents/in/Vineyard?f_ri=1185"},{"id":208441,"name":"FCM","url":"https://www.academia.edu/Documents/in/FCM?f_ri=1185"},{"id":627850,"name":"K Means","url":"https://www.academia.edu/Documents/in/K_Means?f_ri=1185"},{"id":798200,"name":"K means algorithm","url":"https://www.academia.edu/Documents/in/K_means_algorithm?f_ri=1185"},{"id":1800837,"name":"Image Color Analysis","url":"https://www.academia.edu/Documents/in/Image_Color_Analysis?f_ri=1185"},{"id":2131792,"name":"Fuzzy C-mean","url":"https://www.academia.edu/Documents/in/Fuzzy_C-mean?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div><div class="u-borderBottom1 u-borderColorGrayLighter"><div class="clearfix u-pv7x u-mb0x js-work-card work_18650509" data-work_id="18650509" itemscope="itemscope" itemtype="https://schema.org/ScholarlyArticle"><div class="header"><div class="title u-fontSerif u-fs22 u-lineHeight1_3"><a class="u-tcGrayDarkest js-work-link" href="https://www.academia.edu/18650509/Requirements_for_a_real_Time_image_processing_language">Requirements for a real-Time image processing language</a></div></div><div class="u-pb4x u-mt3x"><div class="summary u-fs14 u-fw300 u-lineHeight1_5 u-tcGrayDarkest"><div class="summarized">ABSTRACT In this paper we discuss issues in real-time image processing, including applications, approaches and hardware. In particular, we discuss the failure of existing programming languages to support these considerations and present... <a class="more_link u-tcGrayDark u-linkUnstyled" data-container=".work_18650509" data-show=".complete" data-hide=".summarized" data-more-link-behavior="true" href="#">more</a></div><div class="complete hidden">ABSTRACT In this paper we discuss issues in real-time image processing, including applications, approaches and hardware. In particular, we discuss the failure of existing programming languages to support these considerations and present requirements for any language that can support real-time image processing.</div></div></div><ul class="InlineList u-ph0x u-fs13"><li class="InlineList-item logged_in_only"><div class="share_on_academia_work_button"><a class="academia_share Button Button--inverseBlue Button--sm js-bookmark-button" data-academia-share="Work/18650509" data-share-source="work_strip" data-spinner="small_white_hide_contents"><i class="fa fa-plus"></i><span class="work-strip-link-text u-ml1x" data-content="button_text">Bookmark</span></a></div></li><li class="InlineList-item"><div class="download"><a id="c12156f519d99f777db22b6c7fc44918" rel="nofollow" data-download="{&quot;attachment_id&quot;:42148872,&quot;asset_id&quot;:18650509,&quot;asset_type&quot;:&quot;Work&quot;,&quot;always_allow_download&quot;:false,&quot;track&quot;:null,&quot;button_location&quot;:&quot;work_strip&quot;,&quot;source&quot;:null,&quot;hide_modal&quot;:null}" class="Button Button--sm Button--inverseGreen js-download-button prompt_button doc_download" href="https://www.academia.edu/attachments/42148872/download_file?st=MTczOTcxMjAzNiw4LjIyMi4yMDguMTQ2&s=work_strip"><i class="fa fa-arrow-circle-o-down fa-lg"></i><span class="u-textUppercase u-ml1x" data-content="button_text">Download</span></a></div></li><li class="InlineList-item"><ul class="InlineList InlineList--bordered u-ph0x"><li class="InlineList-item InlineList-item--bordered"><span class="InlineList-item-text">by&nbsp;<span itemscope="itemscope" itemprop="author" itemtype="https://schema.org/Person"><a class="u-tcGrayDark u-fw700" data-has-card-for-user="38725005" href="https://setonhall.academia.edu/ThomasMarlowe">Thomas Marlowe</a><script data-card-contents-for-user="38725005" type="text/json">{"id":38725005,"first_name":"Thomas","last_name":"Marlowe","domain_name":"setonhall","page_name":"ThomasMarlowe","display_name":"Thomas Marlowe","profile_url":"https://setonhall.academia.edu/ThomasMarlowe?f_ri=1185","photo":"/images/s65_no_pic.png"}</script></span></span></li><li class="js-paper-rank-work_18650509 InlineList-item InlineList-item--bordered hidden"><span class="js-paper-rank-view hidden u-tcGrayDark" data-paper-rank-work-id="18650509"><i class="u-m1x fa fa-bar-chart"></i><strong class="js-paper-rank"></strong></span><script>$(function() { new Works.PaperRankView({ workId: 18650509, container: ".js-paper-rank-work_18650509", }); });</script></li><li class="js-percentile-work_18650509 InlineList-item InlineList-item--bordered hidden u-tcGrayDark"><span class="percentile-widget hidden"><span class="u-mr2x percentile-widget" style="display: none">•</span><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 18650509; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-percentile-work_18650509"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></li><li class="js-view-count-work_18650509 InlineList-item InlineList-item--bordered hidden"><div><span><span class="js-view-count view-count u-mr2x" data-work-id="18650509"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 18650509; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=18650509]").text(description); $(".js-view-count-work_18650509").attr('title', description).tooltip(); }); });</script></span><script>$(function() { $(".js-view-count-work_18650509").removeClass('hidden') })</script></div></li><li class="InlineList-item u-positionRelative" style="max-width: 250px"><div class="u-positionAbsolute" data-has-card-for-ri-list="18650509"><i class="fa fa-tag InlineList-item-icon u-positionRelative"></i>&nbsp;&nbsp;<a class="InlineList-item-text u-positionRelative">5</a>&nbsp;&nbsp;</div><span class="InlineList-item-text u-textTruncate u-pl9x"><a class="InlineList-item-text" data-has-card-for-ri="451" rel="nofollow" href="https://www.academia.edu/Documents/in/Programming_Languages">Programming Languages</a>,&nbsp;<script data-card-contents-for-ri="451" type="text/json">{"id":451,"name":"Programming Languages","url":"https://www.academia.edu/Documents/in/Programming_Languages?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="1185" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Processing">Image Processing</a>,&nbsp;<script data-card-contents-for-ri="1185" type="text/json">{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="3419" rel="nofollow" href="https://www.academia.edu/Documents/in/Multimedia">Multimedia</a>,&nbsp;<script data-card-contents-for-ri="3419" type="text/json">{"id":3419,"name":"Multimedia","url":"https://www.academia.edu/Documents/in/Multimedia?f_ri=1185","nofollow":true}</script><a class="InlineList-item-text" data-has-card-for-ri="50642" rel="nofollow" href="https://www.academia.edu/Documents/in/Virtual_Reality">Virtual Reality</a><script data-card-contents-for-ri="50642" type="text/json">{"id":50642,"name":"Virtual Reality","url":"https://www.academia.edu/Documents/in/Virtual_Reality?f_ri=1185","nofollow":true}</script></span></li><script>(function(){ if (true) { new Aedu.ResearchInterestListCard({ el: $('*[data-has-card-for-ri-list=18650509]'), work: {"id":18650509,"title":"Requirements for a real-Time image processing language","created_at":"2015-11-19T13:39:24.288-08:00","url":"https://www.academia.edu/18650509/Requirements_for_a_real_Time_image_processing_language?f_ri=1185","dom_id":"work_18650509","summary":"ABSTRACT In this paper we discuss issues in real-time image processing, including applications, approaches and hardware. In particular, we discuss the failure of existing programming languages to support these considerations and present requirements for any language that can support real-time image processing.","downloadable_attachments":[{"id":42148872,"asset_id":18650509,"asset_type":"Work","always_allow_download":false}],"ordered_authors":[{"id":38725005,"first_name":"Thomas","last_name":"Marlowe","domain_name":"setonhall","page_name":"ThomasMarlowe","display_name":"Thomas Marlowe","profile_url":"https://setonhall.academia.edu/ThomasMarlowe?f_ri=1185","photo":"/images/s65_no_pic.png"}],"research_interests":[{"id":451,"name":"Programming Languages","url":"https://www.academia.edu/Documents/in/Programming_Languages?f_ri=1185","nofollow":true},{"id":1185,"name":"Image Processing","url":"https://www.academia.edu/Documents/in/Image_Processing?f_ri=1185","nofollow":true},{"id":3419,"name":"Multimedia","url":"https://www.academia.edu/Documents/in/Multimedia?f_ri=1185","nofollow":true},{"id":50642,"name":"Virtual Reality","url":"https://www.academia.edu/Documents/in/Virtual_Reality?f_ri=1185","nofollow":true},{"id":229390,"name":"Real Time","url":"https://www.academia.edu/Documents/in/Real_Time?f_ri=1185"}]}, }) } })();</script></ul></li></ul></div></div></div><div class="u-taCenter Pagination"><ul class="pagination"><li class="next_page"><a href="/Documents/in/Image_Processing?after=50%2C18650509" rel="next">Next</a></li><li class="last next"><a href="/Documents/in/Image_Processing?page=last">Last &raquo;</a></li></ul></div></div><div class="hidden-xs hidden-sm"><div class="u-pl6x"><div style="width: 300px;"><div class="panel panel-flat u-mt7x"><div class="panel-heading u-p5x"><div class="u-tcGrayDark u-taCenter u-fw700 u-textUppercase">Related Topics</div></div><ul class="list-group"><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Vision">Computer Vision</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="854">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="854">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Remote_Sensing">Remote Sensing</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="1252">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="1252">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Machine_Learning">Machine Learning</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="2008">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="2008">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Pattern_Recognition">Pattern Recognition</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="5109">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="5109">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Artificial_Intelligence">Artificial Intelligence</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="465">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="465">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="422">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="422">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Geographic_Information_Systems_GIS_">Geographic Information Systems (GIS)</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="1009312">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="1009312">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Digital_Image_Processing">Digital Image Processing</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="35938">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="35938">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Image_Analysis">Image Analysis</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="9351">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="9351">Following</a></div></li><li class="list-group-item media_v2 u-mt0x u-p3x"><div class="media-body"><div class="u-tcGrayDarker u-fw700"><a class="u-tcGrayDarker" rel="nofollow" href="https://www.academia.edu/Documents/in/Soft_Computing_Image_Processing_and_Robotics">Soft Computing, Image Processing and Robotics</a></div></div><div class="media-right media-middle"><a class="u-tcGreen u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-follow-ri-id="9898">Follow</a><a class="u-tcGray u-textDecorationNone u-linkUnstyled u-fw500 hidden" data-unfollow-ri-id="9898">Following</a></div></li></ul></div></div></div></div></div></div><script>// MIT License // Copyright © 2011 Sebastian Tschan, https://blueimp.net // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. !function(n){"use strict";function d(n,t){var r=(65535&n)+(65535&t);return(n>>16)+(t>>16)+(r>>16)<<16|65535&r}function f(n,t,r,e,o,u){return d((c=d(d(t,n),d(e,u)))<<(f=o)|c>>>32-f,r);var c,f}function l(n,t,r,e,o,u,c){return f(t&r|~t&e,n,t,o,u,c)}function v(n,t,r,e,o,u,c){return f(t&e|r&~e,n,t,o,u,c)}function g(n,t,r,e,o,u,c){return f(t^r^e,n,t,o,u,c)}function m(n,t,r,e,o,u,c){return f(r^(t|~e),n,t,o,u,c)}function i(n,t){var r,e,o,u;n[t>>5]|=128<<t%32,n[14+(t+64>>>9<<4)]=t;for(var c=1732584193,f=-271733879,i=-1732584194,a=271733878,h=0;h<n.length;h+=16)c=l(r=c,e=f,o=i,u=a,n[h],7,-680876936),a=l(a,c,f,i,n[h+1],12,-389564586),i=l(i,a,c,f,n[h+2],17,606105819),f=l(f,i,a,c,n[h+3],22,-1044525330),c=l(c,f,i,a,n[h+4],7,-176418897),a=l(a,c,f,i,n[h+5],12,1200080426),i=l(i,a,c,f,n[h+6],17,-1473231341),f=l(f,i,a,c,n[h+7],22,-45705983),c=l(c,f,i,a,n[h+8],7,1770035416),a=l(a,c,f,i,n[h+9],12,-1958414417),i=l(i,a,c,f,n[h+10],17,-42063),f=l(f,i,a,c,n[h+11],22,-1990404162),c=l(c,f,i,a,n[h+12],7,1804603682),a=l(a,c,f,i,n[h+13],12,-40341101),i=l(i,a,c,f,n[h+14],17,-1502002290),c=v(c,f=l(f,i,a,c,n[h+15],22,1236535329),i,a,n[h+1],5,-165796510),a=v(a,c,f,i,n[h+6],9,-1069501632),i=v(i,a,c,f,n[h+11],14,643717713),f=v(f,i,a,c,n[h],20,-373897302),c=v(c,f,i,a,n[h+5],5,-701558691),a=v(a,c,f,i,n[h+10],9,38016083),i=v(i,a,c,f,n[h+15],14,-660478335),f=v(f,i,a,c,n[h+4],20,-405537848),c=v(c,f,i,a,n[h+9],5,568446438),a=v(a,c,f,i,n[h+14],9,-1019803690),i=v(i,a,c,f,n[h+3],14,-187363961),f=v(f,i,a,c,n[h+8],20,1163531501),c=v(c,f,i,a,n[h+13],5,-1444681467),a=v(a,c,f,i,n[h+2],9,-51403784),i=v(i,a,c,f,n[h+7],14,1735328473),c=g(c,f=v(f,i,a,c,n[h+12],20,-1926607734),i,a,n[h+5],4,-378558),a=g(a,c,f,i,n[h+8],11,-2022574463),i=g(i,a,c,f,n[h+11],16,1839030562),f=g(f,i,a,c,n[h+14],23,-35309556),c=g(c,f,i,a,n[h+1],4,-1530992060),a=g(a,c,f,i,n[h+4],11,1272893353),i=g(i,a,c,f,n[h+7],16,-155497632),f=g(f,i,a,c,n[h+10],23,-1094730640),c=g(c,f,i,a,n[h+13],4,681279174),a=g(a,c,f,i,n[h],11,-358537222),i=g(i,a,c,f,n[h+3],16,-722521979),f=g(f,i,a,c,n[h+6],23,76029189),c=g(c,f,i,a,n[h+9],4,-640364487),a=g(a,c,f,i,n[h+12],11,-421815835),i=g(i,a,c,f,n[h+15],16,530742520),c=m(c,f=g(f,i,a,c,n[h+2],23,-995338651),i,a,n[h],6,-198630844),a=m(a,c,f,i,n[h+7],10,1126891415),i=m(i,a,c,f,n[h+14],15,-1416354905),f=m(f,i,a,c,n[h+5],21,-57434055),c=m(c,f,i,a,n[h+12],6,1700485571),a=m(a,c,f,i,n[h+3],10,-1894986606),i=m(i,a,c,f,n[h+10],15,-1051523),f=m(f,i,a,c,n[h+1],21,-2054922799),c=m(c,f,i,a,n[h+8],6,1873313359),a=m(a,c,f,i,n[h+15],10,-30611744),i=m(i,a,c,f,n[h+6],15,-1560198380),f=m(f,i,a,c,n[h+13],21,1309151649),c=m(c,f,i,a,n[h+4],6,-145523070),a=m(a,c,f,i,n[h+11],10,-1120210379),i=m(i,a,c,f,n[h+2],15,718787259),f=m(f,i,a,c,n[h+9],21,-343485551),c=d(c,r),f=d(f,e),i=d(i,o),a=d(a,u);return[c,f,i,a]}function a(n){for(var t="",r=32*n.length,e=0;e<r;e+=8)t+=String.fromCharCode(n[e>>5]>>>e%32&255);return t}function h(n){var t=[];for(t[(n.length>>2)-1]=void 0,e=0;e<t.length;e+=1)t[e]=0;for(var r=8*n.length,e=0;e<r;e+=8)t[e>>5]|=(255&n.charCodeAt(e/8))<<e%32;return t}function e(n){for(var t,r="0123456789abcdef",e="",o=0;o<n.length;o+=1)t=n.charCodeAt(o),e+=r.charAt(t>>>4&15)+r.charAt(15&t);return e}function r(n){return unescape(encodeURIComponent(n))}function o(n){return a(i(h(t=r(n)),8*t.length));var t}function u(n,t){return function(n,t){var r,e,o=h(n),u=[],c=[];for(u[15]=c[15]=void 0,16<o.length&&(o=i(o,8*n.length)),r=0;r<16;r+=1)u[r]=909522486^o[r],c[r]=1549556828^o[r];return e=i(u.concat(h(t)),512+8*t.length),a(i(c.concat(e),640))}(r(n),r(t))}function t(n,t,r){return t?r?u(t,n):e(u(t,n)):r?o(n):e(o(n))}"function"==typeof define&&define.amd?define(function(){return t}):"object"==typeof module&&module.exports?module.exports=t:n.md5=t}(this);</script><script>window.AbTest = (function() { return { 'ab_test': (uniqueId, test_name, buckets) => { let override = new URLSearchParams(window.location.search).get(`ab_test[${test_name}]`); if ( override ) { return override; } const bucketNames = buckets.map((bucket) => { return typeof bucket === 'string' ? bucket : Object.keys(bucket)[0]; }); const weights = buckets.map((bucket) => { return typeof bucket === 'string' ? 1 : Object.values(bucket)[0]; }); const total = weights.reduce((sum, weight) => sum + weight); const hash = md5(`${uniqueId}${test_name}`); const hashNum = parseInt(hash.slice(-12), 16); let bucketPoint = total * (hashNum % 100000) / 100000; const bucket = bucketNames.find((_, i) => { if (weights[i] > bucketPoint) { return true; } bucketPoint -= weights[i]; return false; }); return bucket; } }; })();</script><div data-auto_select="false" data-client_id="331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b" data-landing_url="https://www.academia.edu/Documents/in/Image_Processing" data-login_uri="https://www.academia.edu/registrations/google_one_tap" data-moment_callback="onGoogleOneTapEvent" id="g_id_onload"></div><script>function onGoogleOneTapEvent(event) { var momentType = event.getMomentType(); var momentReason = null; if (event.isNotDisplayed()) { momentReason = event.getNotDisplayedReason(); } else if (event.isSkippedMoment()) { momentReason = event.getSkippedReason(); } else if (event.isDismissedMoment()) { momentReason = event.getDismissedReason(); } Aedu.arbitraryEvents.write('GoogleOneTapEvent', { moment_type: momentType, moment_reason: momentReason, }); }</script><script>(function() { var auvid = unescape( document.cookie .split(/; ?/) .find((s) => s.startsWith('auvid')) .substring(6)); var bucket = AbTest.ab_test(auvid, 'lo_ri_one_tap_google_sign_on', ['control', 'one_tap_google_sign_on']); if (bucket === 'control') return; var oneTapTag = document.createElement('script') oneTapTag.async = true oneTapTag.defer = true oneTapTag.src = 'https://accounts.google.com/gsi/client' document.body.appendChild(oneTapTag) })();</script></div></div></div> </div> <div class="bootstrap login"><div class="modal fade login-modal" id="login-modal"><div class="login-modal-dialog modal-dialog"><div class="modal-content"><div class="modal-header"><button class="close close" data-dismiss="modal" type="button"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button><h4 class="modal-title text-center"><strong>Log In</strong></h4></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><button class="btn btn-fb btn-lg btn-block btn-v-center-content" id="login-facebook-oauth-button"><svg style="float: left; width: 19px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="facebook-square" class="svg-inline--fa fa-facebook-square fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M400 32H48A48 48 0 0 0 0 80v352a48 48 0 0 0 48 48h137.25V327.69h-63V256h63v-54.64c0-62.15 37-96.48 93.67-96.48 27.14 0 55.52 4.84 55.52 4.84v61h-31.27c-30.81 0-40.42 19.12-40.42 38.73V256h68.78l-11 71.69h-57.78V480H400a48 48 0 0 0 48-48V80a48 48 0 0 0-48-48z"></path></svg><small><strong>Log in</strong> with <strong>Facebook</strong></small></button><br /><button class="btn btn-google btn-lg btn-block btn-v-center-content" id="login-google-oauth-button"><svg style="float: left; width: 22px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="google-plus" class="svg-inline--fa fa-google-plus fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M256,8C119.1,8,8,119.1,8,256S119.1,504,256,504,504,392.9,504,256,392.9,8,256,8ZM185.3,380a124,124,0,0,1,0-248c31.3,0,60.1,11,83,32.3l-33.6,32.6c-13.2-12.9-31.3-19.1-49.4-19.1-42.9,0-77.2,35.5-77.2,78.1S142.3,334,185.3,334c32.6,0,64.9-19.1,70.1-53.3H185.3V238.1H302.2a109.2,109.2,0,0,1,1.9,20.7c0,70.8-47.5,121.2-118.8,121.2ZM415.5,273.8v35.5H380V273.8H344.5V238.3H380V202.8h35.5v35.5h35.2v35.5Z"></path></svg><small><strong>Log in</strong> with <strong>Google</strong></small></button><br /><style type="text/css">.sign-in-with-apple-button { width: 100%; height: 52px; border-radius: 3px; border: 1px solid black; cursor: pointer; } .sign-in-with-apple-button > div { margin: 0 auto; / This centers the Apple-rendered button horizontally }</style><script src="https://appleid.cdn-apple.com/appleauth/static/jsapi/appleid/1/en_US/appleid.auth.js" type="text/javascript"></script><div class="sign-in-with-apple-button" data-border="false" data-color="white" id="appleid-signin"><span &nbsp;&nbsp;="Sign Up with Apple" class="u-fs11"></span></div><script>AppleID.auth.init({ clientId: 'edu.academia.applesignon', scope: 'name email', redirectURI: 'https://www.academia.edu/sessions', state: "cfa8c6610eb318a20e40a721a7a972331eb507e7ccf5142e871fae7e0ea6dba8", });</script><script>// Hacky way of checking if on fast loswp if (window.loswp == null) { (function() { const Google = window?.Aedu?.Auth?.OauthButton?.Login?.Google; const Facebook = window?.Aedu?.Auth?.OauthButton?.Login?.Facebook; if (Google) { new Google({ el: '#login-google-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } if (Facebook) { new Facebook({ el: '#login-facebook-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } })(); }</script></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><div class="hr-heading login-hr-heading"><span class="hr-heading-text">or</span></div></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><form class="js-login-form" action="https://www.academia.edu/sessions" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="ln4StBT0D_yySdgekOsatFAEd1C-SqB8G9vZnecbjwGiMaxX6wc8ELQjR0gxOp039EnlEc5bViUgR8Q7f9nSOw" autocomplete="off" /><div class="form-group"><label class="control-label" for="login-modal-email-input" style="font-size: 14px;">Email</label><input class="form-control" id="login-modal-email-input" name="login" type="email" /></div><div class="form-group"><label class="control-label" for="login-modal-password-input" style="font-size: 14px;">Password</label><input class="form-control" id="login-modal-password-input" name="password" type="password" /></div><input type="hidden" name="post_login_redirect_url" id="post_login_redirect_url" value="https://www.academia.edu/Documents/in/Image_Processing" autocomplete="off" /><div class="checkbox"><label><input type="checkbox" name="remember_me" id="remember_me" value="1" checked="checked" /><small style="font-size: 12px; margin-top: 2px; display: inline-block;">Remember me on this computer</small></label></div><br><input type="submit" name="commit" value="Log In" class="btn btn-primary btn-block btn-lg js-login-submit" data-disable-with="Log In" /></br></form><script>typeof window?.Aedu?.recaptchaManagedForm === 'function' && window.Aedu.recaptchaManagedForm( document.querySelector('.js-login-form'), document.querySelector('.js-login-submit') );</script><small style="font-size: 12px;"><br />or <a data-target="#login-modal-reset-password-container" data-toggle="collapse" href="javascript:void(0)">reset password</a></small><div class="collapse" id="login-modal-reset-password-container"><br /><div class="well margin-0x"><form class="js-password-reset-form" action="https://www.academia.edu/reset_password" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="JZGaXRlPf6ao0Hb-vrCcNFP_v1gok2GLQhUaNz-n7I0R3iS-5rxMSq666agfYRu397ItGViCl9J5iQeRp2Wxtw" autocomplete="off" /><p>Enter the email address you signed up with and we&#39;ll email you a reset link.</p><div class="form-group"><input class="form-control" name="email" type="email" /></div><script src="https://recaptcha.net/recaptcha/api.js" async defer></script> <script> var invisibleRecaptchaSubmit = function () { var closestForm = function (ele) { var curEle = ele.parentNode; while (curEle.nodeName !== 'FORM' && curEle.nodeName !== 'BODY'){ curEle = curEle.parentNode; } return curEle.nodeName === 'FORM' ? curEle : null }; var eles = document.getElementsByClassName('g-recaptcha'); if (eles.length > 0) { var form = closestForm(eles[0]); if (form) { form.submit(); } } }; </script> <input type="submit" data-sitekey="6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj" data-callback="invisibleRecaptchaSubmit" class="g-recaptcha btn btn-primary btn-block" value="Email me a link" value=""/> </form></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/collapse-45805421cf446ca5adf7aaa1935b08a3a8d1d9a6cc5d91a62a2a3a00b20b3e6a.js"], function() { // from javascript_helper.rb $("#login-modal-reset-password-container").on("shown.bs.collapse", function() { $(this).find("input[type=email]").focus(); }); }); </script> </div></div></div><div class="modal-footer"><div class="text-center"><small style="font-size: 12px;">Need an account?&nbsp;<a rel="nofollow" href="https://www.academia.edu/signup">Click here to sign up</a></small></div></div></div></div></div></div><script>// If we are on subdomain or non-bootstrapped page, redirect to login page instead of showing modal (function(){ if (typeof $ === 'undefined') return; var host = window.location.hostname; if ((host === $domain || host === "www."+$domain) && (typeof $().modal === 'function')) { $("#nav_log_in").click(function(e) { // Don't follow the link and open the modal e.preventDefault(); $("#login-modal").on('shown.bs.modal', function() { $(this).find("#login-modal-email-input").focus() }).modal('show'); }); } })()</script> <div class="bootstrap" id="footer"><div class="footer-content clearfix text-center padding-top-7x" style="width:100%;"><ul class="footer-links-secondary footer-links-wide list-inline margin-bottom-1x"><li><a href="https://www.academia.edu/about">About</a></li><li><a href="https://www.academia.edu/press">Press</a></li><li><a href="https://www.academia.edu/documents">Papers</a></li><li><a href="https://www.academia.edu/topics">Topics</a></li><li><a href="https://www.academia.edu/journals">Academia.edu Journals</a></li><li><a rel="nofollow" href="https://www.academia.edu/hiring"><svg style="width: 13px; height: 13px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="briefcase" class="svg-inline--fa fa-briefcase fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M320 336c0 8.84-7.16 16-16 16h-96c-8.84 0-16-7.16-16-16v-48H0v144c0 25.6 22.4 48 48 48h416c25.6 0 48-22.4 48-48V288H320v48zm144-208h-80V80c0-25.6-22.4-48-48-48H176c-25.6 0-48 22.4-48 48v48H48c-25.6 0-48 22.4-48 48v80h512v-80c0-25.6-22.4-48-48-48zm-144 0H192V96h128v32z"></path></svg>&nbsp;<strong>We're Hiring!</strong></a></li><li><a rel="nofollow" href="https://support.academia.edu/hc/en-us"><svg style="width: 12px; height: 12px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="question-circle" class="svg-inline--fa fa-question-circle fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M504 256c0 136.997-111.043 248-248 248S8 392.997 8 256C8 119.083 119.043 8 256 8s248 111.083 248 248zM262.655 90c-54.497 0-89.255 22.957-116.549 63.758-3.536 5.286-2.353 12.415 2.715 16.258l34.699 26.31c5.205 3.947 12.621 3.008 16.665-2.122 17.864-22.658 30.113-35.797 57.303-35.797 20.429 0 45.698 13.148 45.698 32.958 0 14.976-12.363 22.667-32.534 33.976C247.128 238.528 216 254.941 216 296v4c0 6.627 5.373 12 12 12h56c6.627 0 12-5.373 12-12v-1.333c0-28.462 83.186-29.647 83.186-106.667 0-58.002-60.165-102-116.531-102zM256 338c-25.365 0-46 20.635-46 46 0 25.364 20.635 46 46 46s46-20.636 46-46c0-25.365-20.635-46-46-46z"></path></svg>&nbsp;<strong>Help Center</strong></a></li></ul><ul class="footer-links-tertiary list-inline margin-bottom-1x"><li class="small">Find new research papers in:</li><li class="small"><a href="https://www.academia.edu/Documents/in/Physics">Physics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Chemistry">Chemistry</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Biology">Biology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Health_Sciences">Health Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Ecology">Ecology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Earth_Sciences">Earth Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></li></ul></div></div><div class="DesignSystem" id="credit" style="width:100%;"><ul class="u-pl0x footer-links-legal list-inline"><li><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li>Academia &copy;2025</li></ul></div><script> //<![CDATA[ window.detect_gmtoffset = true; window.Academia && window.Academia.set_gmtoffset && Academia.set_gmtoffset('/gmtoffset'); //]]> </script> <div id='overlay_background'></div> <div id='bootstrap-modal-container' class='bootstrap'></div> <div id='ds-modal-container' class='bootstrap DesignSystem'></div> <div id='full-screen-modal'></div> </div> </body> </html>

Pages: 1 2 3 4 5 6 7 8 9 10