
Paramate Horkaew | SURANAREE UNIVERSITY OF TECHNOLOGY - Academia.edu

Paramate Horkaew
SURANAREE UNIVERSITY OF TECHNOLOGY, School of Computer Engineering, Faculty Member

Followers: 451 · Following: 35 · Co-authors: 3

Dr. Horkaew received his B.Eng. (1st Hons.) in Telecommunication Engineering (1999) from King Mongkut's Institute of Technology, Ladkrabang (KMITL). During his undergraduate study, he worked as an RA in medical informatics at the Computed Tomography Laboratory, NECTEC (1997-99). He then continued his research, supported by the Ministry of Science, in medical image computing at the Visual Information Processing (VIP) group and the Royal Society/Wolfson Foundation Medical Image Computing (MIC) laboratory, Imperial College London (2000-04).

His Ph.D. thesis focused on an efficient and automatic method for constructing optimal statistical deformable models for complex topological shapes, with application to cardiovascular imaging. As part of his research, in collaboration with the Royal Brompton Hospital (RBHT) London, he also co-wrote computer-assisted diagnosis software for cardiovascular magnetic resonance images (CMRTools®).

He is now an associate professor with the School of Computer Engineering, Suranaree University of Technology. His main research interests include Networks, Remote Sensing, Computational Anatomy, Digital Geometry Processing, Computer Vision and Graphics.

Related Authors
- James Elkins (School of the Art Institute of Chicago)
- Maurizio Forte (Duke University)
- Giulia Sissa (UCLA)
- Olga Panagiotopoulou (The University of Queensland, Australia)
- Fabio Cuzzolin (Oxford Brookes University)
- Roshan Chitrakar (Nepal College of Information Technology)
- Lev Manovich (Graduate Center of the City University of New York)
- Davide Nadali (Università degli Studi "La Sapienza" di Roma)
- Fusun Balik Sanli (Yildiz Technical University)
- Hassen Jerbi (National Veterinary School of Sidi Thabet, Tunisia)

Interests: Computer Vision, Anatomy, Remote Sensing, Computer Science

Papers by Paramate Horkaew
Non-Destructive Inspection of Tile Debonding by DWT and MFCC of Tile-Tapping Sound with Machine versus Deep Learning Models
ECTI Transactions on Computer and Information Technology, Jan 20, 2024

One of the essential processes of construction quality control is tile bonding inspection. Hollows beneath tile tessellation can lead to unbonded or completely broken tiles. An interior inspector typically uses a hollow-sounding technique; however, it relies on skill and judgment that vary greatly among individuals. Moreover, the equipment and interpretation are difficult to calibrate and standardize. This paper addresses these issues by employing machine-learning strategies for tile-tapping sound classification. Provided that a tapping signal was digitally acquired, the proposed method was fully computerized. Firstly, the signal was analyzed and its wavelet and MFCC features were extracted. The corresponding spectral features were then classified by the SVM, k-NN, Naïve Bayes, and Logistic Regression algorithms, in turn. The results were subsequently compared against those from previous works that employed a deep learning strategy. It was revealed that, when the proposed method was properly configured, it required much less computing resources than the deep learning based one, while being able to distinguish dull from hollow sounding tiles with 93.67% accuracy.
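For readers unfamiliar with the pipeline the abstract describes, the sketch below shows the general shape of extracting MFCC features from tapping recordings and training a classical classifier on them. It is a minimal, hypothetical example using librosa and scikit-learn: the file names, feature summary, and SVM settings are assumptions for illustration only, not the authors' actual configuration, which also uses DWT features and compares several classifiers.

```python
# Hedged sketch: MFCC features + SVM for tapping-sound classification.
# Assumes WAV recordings labelled "dull" vs "hollow"; not the paper's exact setup.
import numpy as np
import librosa
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

def mfcc_features(path, n_mfcc=13):
    """Load a mono recording and summarize its MFCCs over time."""
    y, sr = librosa.load(path, sr=None, mono=True)
    mfcc = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=n_mfcc)
    # Mean and standard deviation per coefficient -> fixed-length feature vector.
    return np.concatenate([mfcc.mean(axis=1), mfcc.std(axis=1)])

# Hypothetical dataset: (wav_path, label) pairs, 0 = dull, 1 = hollow.
dataset = [("taps/dull_01.wav", 0), ("taps/hollow_01.wav", 1)]  # ...

X = np.array([mfcc_features(path) for path, _ in dataset])
y = np.array([label for _, label in dataset])

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=0)
clf = SVC(kernel="rbf").fit(X_tr, y_tr)
print("accuracy:", accuracy_score(y_te, clf.predict(X_te)))
```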
Models","internal_url":"https://www.academia.edu/125528953/Non_Destructive_Inspection_of_Tile_Debonding_by_DWT_and_MFCC_of_Tile_Tapping_Sound_with_Machine_versus_Deep_Learning_Models","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":119555165,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/119555165/thumbnails/1.jpg","file_name":"171870.pdf","download_url":"https://www.academia.edu/attachments/119555165/download_file","bulk_download_file_name":"Non_Destructive_Inspection_of_Tile_Debon.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/119555165/171870-libre.pdf?1731564858=\u0026response-content-disposition=attachment%3B+filename%3DNon_Destructive_Inspection_of_Tile_Debon.pdf\u0026Expires=1740106380\u0026Signature=fzGV0pKYXjjR9bknZ92UZ6v8u6tmzUrEHpHmMUCgpWb5xhkwf92vh9C1ziznTovx-~Uav5vT3LrSlZKO2FxoNUCG6FyqyPhJeHncTYbytMw4EN6XM2LbdBS5f2mUnEvOs5-Aze86K7TR8Jrk6VeGUY982vYo5QLnwWDiVUHVU1FbEOAmrOjlmpq2xVrPoRtxtGL2ATeS2jiwWYWgp9iiSnOeUX0lSjoW7F19WrBM4VxqLIY-Fajelt4slHCuEN9mOufRjb-pqUNf0R~AfMkZEDjCQcHlWkv4~ywtmZ6UN67TnNJR45mTOlBN-cU99DiOILTMl0vm3MfqzSqedC0q4g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":119555164,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/119555164/thumbnails/1.jpg","file_name":"171870.pdf","download_url":"https://www.academia.edu/attachments/119555164/download_file","bulk_download_file_name":"Non_Destructive_Inspection_of_Tile_Debon.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/119555164/171870-libre.pdf?1731564859=\u0026response-content-disposition=attachment%3B+filename%3DNon_Destructive_Inspection_of_Tile_Debon.pdf\u0026Expires=1740106380\u0026Signature=GsSV84-KTlklZQlQf-hQXswLAwB6QMpt66uKzNF6megkgd7lJGP9e4PTkfVnUDxcOSF-osTeCr30UK-64bDl7juRleXtj4i5cOqk9zOW9cgYSo9v0mk1nm8XxBRtbBU5j3Y3n~lX0s7zmtYza8nBSGnHiXGYxhWLuJXir0A9Fi3yBBmla3SqeswMdFOHEID7LT8VuzYi-rXGOnWK2kzrRnbb-Xph1vD0LIJZo-sq506UzqEWTevrWR9UMaKAA2Zp2apIlCj81EsXCo7ok8lNeXWz~4K0lpr3d4hrykaWdVagLq3RuS976ax0PUqg84iQtT0MDI-o2uyx7SawLEZzLg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="125528951"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/125528951/A_Geospatial_Donation_Platform_for_COVID_19_and_Beyond_Leveraging_Location_Based_Services_and_Geofencing"><img alt="Research paper thumbnail of A Geospatial Donation Platform for COVID-19 and Beyond, Leveraging Location – Based Services and Geofencing" class="work-thumbnail" src="https://attachments.academia-assets.com/119555160/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/125528951/A_Geospatial_Donation_Platform_for_COVID_19_and_Beyond_Leveraging_Location_Based_Services_and_Geofencing">A Geospatial Donation Platform for 
COVID-19 and Beyond, Leveraging Location – Based Services and Geofencing</a></div><div class="wp-workCard_item"><span>TEM Journal</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The current global scenario is characterized by epidemics and various types of disasters, severel...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The current global scenario is characterized by epidemics and various types of disasters, severely impacting communities&amp;#39; health, living conditions, and economic stability. Especially during such crises, the requirement for essential necessities becomes critical. Existing solution guidelines involve receiving donated items from public agencies through an offline system to provide assistance to victims. However, this implementation faces several limitations, such as a lack of understanding of people&amp;#39;s needs in specific areas, leading to mismatches between assistance and actual requirements. Additionally, donators lack adequate information, resulting in further discrepancies between donated items and the victims&amp;#39; genuine needs. The available geospatial platforms primarily support surveillance and monitoring of epidemic or disaster situations but fail to address the management of needs related to donation and receipt. Through an extensive review of the literature and relate...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="d7f9cb83033ee12afb36a08cfbe04c7b" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:119555160,&quot;asset_id&quot;:125528951,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/119555160/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="125528951"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="125528951"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 125528951; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=125528951]").text(description); $(".js-view-count[data-work-id=125528951]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 125528951; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='125528951']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); 
container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "d7f9cb83033ee12afb36a08cfbe04c7b" } } $('.js-work-strip[data-work-id=125528951]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":125528951,"title":"A Geospatial Donation Platform for COVID-19 and Beyond, Leveraging Location – Based Services and Geofencing","internal_url":"https://www.academia.edu/125528951/A_Geospatial_Donation_Platform_for_COVID_19_and_Beyond_Leveraging_Location_Based_Services_and_Geofencing","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":119555160,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/119555160/thumbnails/1.jpg","file_name":"TEMJournalAugust2023_1902_1913.pdf","download_url":"https://www.academia.edu/attachments/119555160/download_file","bulk_download_file_name":"A_Geospatial_Donation_Platform_for_COVID.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/119555160/TEMJournalAugust2023_1902_1913-libre.pdf?1731564864=\u0026response-content-disposition=attachment%3B+filename%3DA_Geospatial_Donation_Platform_for_COVID.pdf\u0026Expires=1740106380\u0026Signature=VpjfeNRn1DArP6vUvtK5-IBsSBFDb5PPKBEPRi1FFnaaMTDP5I8g3MwUa58nDLI8eZYssmAjK4X~gSBb2hgUPfhP8eddWM9o9ugbKn3aZpjsm7xdm4INqFTZwv9I~VIPKx0wBs1Cj0ucK7BynN2esxUpbA4Se5uO8JTDIouZEj9Yzsq1fW6WzGYppSZgmYkw8L9dz0Vj-r4ylTl3jP1lkA3dBaBr7B3dK31qVhvNWqpcv~~kElWm5v4MEweDaF-5qbOzFYVrSXZZz-eZcVcTp5rOv6lLoGAaywAiJ9gK6f0ppppJ~Ub95tiAeyFVQP2tY4z-O7fQHRn3HQIxIaiH1g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":119555161,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/119555161/thumbnails/1.jpg","file_name":"TEMJournalAugust2023_1902_1913.pdf","download_url":"https://www.academia.edu/attachments/119555161/download_file","bulk_download_file_name":"A_Geospatial_Donation_Platform_for_COVID.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/119555161/TEMJournalAugust2023_1902_1913-libre.pdf?1731564862=\u0026response-content-disposition=attachment%3B+filename%3DA_Geospatial_Donation_Platform_for_COVID.pdf\u0026Expires=1740106380\u0026Signature=T0gwgGv1xrMgCTi--n2ELPoIIucseTISMaVkwn4ob~T4-WfMN4QAEEpEPs~X11jnuGS41CoMwgyF4mthKJskc0xPyZ57jxZRGeYK52qlN~93zai94mfi~6lckvKXYKlQpBBHSr5QIAG1ZYiYtABrAKXpsJP5pzNLYQuLB6KdcRG5HHOGz9q5UGnjxZe44VOBheNuAIpjAyBsVQRBgqW4TfrRQEOybXEMhS~AMAoSg~8KiV~EsglLf4u0Ni1cwfk9UGozwG2jhUydGK7fmWIXKzDa4UlHSyB-7Br1QuzusOXc2m03JA6V3i~aridgsoQ3rUt9ac~SaFGf9DMOLzY2MA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); 
Flood Warning and Management Schemes with Drone Emulator Using Ultrasonic and Image Processing
by Boonchoo Srikudkao and Paramate Horkaew
Advances in Intelligent Systems and Computing, 2015
The objective of this paper is to assess the feasibility of an alternative approach to collecting water information relating to a flooding crisis by means of a small drone. This information includes aerial images, their geographic locations, and water flow velocity and direction, all of which are normally difficult to obtain and in fact expensive if a conventional helicopter or buoy were used instead. With a drone, however, these acquisitions can be made by a minimally trained operator and under a controlled budget. This paper presents the breakout configuration and integration of various sensors and their data management scheme, based on a series of image processing techniques, emulating the tasks required to estimate the key flood-related parameters. The experimental results reported herein could provide a basis for determining its potential applications in flood warning and prediction systems, as well as concerns that need to be addressed.
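As an illustration of the image-processing step sketched in this abstract, the following minimal Python sketch (not the authors' implementation) estimates mean surface-flow speed and direction from two consecutive aerial frames using dense optical flow; the synthetic frames, the ground sampling distance gsd_m_per_px, and the frame interval dt_s are assumed placeholder values.

    # Sketch: mean surface-flow speed/direction from two aerial frames (placeholder data).
    import cv2
    import numpy as np

    rng = np.random.default_rng(0)
    prev = (rng.random((240, 320)) * 255).astype(np.uint8)
    prev = cv2.GaussianBlur(prev, (7, 7), 0)            # synthetic water texture
    curr = np.roll(prev, shift=(2, 5), axis=(0, 1))     # simulate a 5 px / 2 px drift

    # Dense optical flow between the two frames (Farneback method).
    flow = cv2.calcOpticalFlowFarneback(prev, curr, None, 0.5, 3, 21, 3, 5, 1.1, 0)

    gsd_m_per_px = 0.05    # assumed ground sampling distance (metres per pixel)
    dt_s = 0.5             # assumed time between frames (seconds)

    dx, dy = flow[..., 0].mean(), flow[..., 1].mean()   # mean displacement in pixels
    speed_mps = np.hypot(dx, dy) * gsd_m_per_px / dt_s  # convert to metres per second
    heading_deg = np.degrees(np.arctan2(dy, dx))        # flow direction in the image frame

    print(f"mean surface velocity ~ {speed_mps:.2f} m/s, heading {heading_deg:.1f} deg")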
Srikudkao","url":"https://independent.academia.edu/BoonchooSrikudkao"},"attachments":[{"id":112514988,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/112514988/thumbnails/1.jpg","file_name":"978-3-319-19024-2_11.pdf","download_url":"https://www.academia.edu/attachments/112514988/download_file","bulk_download_file_name":"Flood_Warning_and_Management_Schemes_wit.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/112514988/978-3-319-19024-2_11-libre.pdf?1710736760=\u0026response-content-disposition=attachment%3B+filename%3DFlood_Warning_and_Management_Schemes_wit.pdf\u0026Expires=1740157310\u0026Signature=VFyvqHXNT-NS-6FuGViveTKXeysRQIFBji1YJ64Wb0wrRxfW~MjAXmzOSoHCiuyB~hNlrbYSxF1niKoEFcea~e5qZfT9AV9wZJeJGjkT3VhNawpJe2HFeKhkjzEamHKOtqjI0dPrKF3b50MM9dTwuseEc1FIRHE1-oocSYAZyMNiwO4wpwb2HdOwvaYGVxPmjWFQV2c8NgfKVHUoyRk5PfK-wtDtv7--1jzuQSdEOTonuFMNJOZ1T18IhfjGm~2qQHZneKKhp8ducZQud8LqdG436rjp3LyMA8p7JgvBntfUwKE5ElFdaysE03G7gL2AoG2TiJWPO-u4bOOjH~mFcg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111955748"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111955748/Factors_Contributing_to_Students_Engagement_A_Case_Study_at_the_Institute_of_Medicine_at_SUT"><img alt="Research paper thumbnail of Factors Contributing to Students Engagement: A Case Study at the Institute of Medicine at SUT" class="work-thumbnail" src="https://attachments.academia-assets.com/109337285/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111955748/Factors_Contributing_to_Students_Engagement_A_Case_Study_at_the_Institute_of_Medicine_at_SUT">Factors Contributing to Students Engagement: A Case Study at the Institute of Medicine at SUT</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In higher education, much attention has been focused on the enhancement of the educational experi...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In higher education, much attention has been focused on the enhancement of the educational experience, allowing students to successfully develop and thus make the most of not only their potential, but also the numerous other benefits education has to offer. Being engaged both institutionally and academically plays a vital part in developing their potential and performance. Therefore, this paper studied the engagement level towards the academics at the Institute of medicine. Factors contributing to institutional engagement were also analyzed. The participants include 229 medical students. Each participant was asked to answer a general demographic questionnaire, the Institute engagement questionnaire, the Utrecht Work Engagement Scale-Student version (UWES-S) questionnaire, and a questionnaire of all relevant factors. 
Student engagement was assessed through statistical analysis. These included percentage, mean, standard deviation, and stepwise multiple regression of the constituent factors. The Institute engagement level was 3.73. Factors that significantly pertained to the engagement level were teachers (p = 0.01*), staff (p = 0.01*), friends (p = 0.02*), and seniors peers (p = 0.03*), respectively. Academic engagement was found to vary by the level of study. Medical students in their 1st, 2nd, and 3rd years exhibited engagement levels of 4.94, 4.87, and 4.55, respectively. Given the group, students&#39; engagement toward the university was of a high level. The most important contributing factors were their relationship with teachers, staff, friends, and senior peers. However, the academic engagement level tended to decrease as study progressed. It was conjectured that this notable decrease resulted from increasing complexity in the program as specified by the curriculum. Positively engaged students better adapt to the academic context of higher education. Hence, they are much likely to succeed.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="ad207990e40c68357f323252d91f52c0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109337285,&quot;asset_id&quot;:111955748,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109337285/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111955748"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111955748"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111955748; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111955748]").text(description); $(".js-view-count[data-work-id=111955748]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111955748; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111955748']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ 
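For readers unfamiliar with the analysis named above, the following is a minimal, hypothetical Python sketch of forward stepwise multiple regression using statsmodels; the synthetic data and the factor names teachers, staff, friends, and seniors are placeholders, not the study's data or results.

    # Sketch: forward stepwise selection of engagement factors by p-value (synthetic data).
    import numpy as np
    import pandas as pd
    import statsmodels.api as sm

    rng = np.random.default_rng(1)
    n = 229                                              # sample size reported in the abstract
    X = pd.DataFrame(rng.normal(3.5, 0.6, size=(n, 4)),
                     columns=["teachers", "staff", "friends", "seniors"])
    y = 1.0 + 0.5 * X["teachers"] + 0.3 * X["staff"] + rng.normal(0, 0.4, n)

    selected, remaining = [], list(X.columns)
    while remaining:
        pvals = {}
        for cand in remaining:
            model = sm.OLS(y, sm.add_constant(X[selected + [cand]])).fit()
            pvals[cand] = model.pvalues[cand]
        best = min(pvals, key=pvals.get)
        if pvals[best] >= 0.05:                          # stop when no candidate is significant
            break
        selected.append(best)
        remaining.remove(best)

    print("selected factors:", selected)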
Statistical modelling of complex topological shapes with application to cardiovascular imaging
The blood flow patterns in vivo are highly complex; they vary considerably from subject to subject and even more so in patients with cardiovascular diseases. Over the last five years, there has been a rapid surge of interest in combining computational fluid dynamics (CFD) with in vivo imaging techniques for studying interactions between vessel morphology and blood flow patterns. CFD gives the ability to compute features and properties which cannot be measured, e.g. wall shear stress and mass transfer rate, but which are important to studies of atherosclerosis or to the design of vessel prostheses. Moreover, it can also provide details of the flow which are often beyond the discrimination of the imaging techniques. This trend is driven by our increased understanding of biomechanics, the maturity of computational modelling techniques, and advancements in imaging. To this end, accurate delineation of cardiac morphology and its associated in-flow/out-flow tracts is required. Due to the complex topology of the dynamic shapes involved, this procedure usually involves labour-intensive user interaction with a large amount of 4D spatio-temporal information. With the increasing popularity of active shape and appearance models, 3D shape modelling and segmentation based on these techniques are gaining significant clinical interest. The practical quality of the statistical model relies on the definition of correspondence across a set of segmented samples. For time-varying 3D cardiovascular structures, landmark-based techniques are not only time consuming but also prone to subjective error, as temporal alignment of geometrical features is difficult. Moreover, when all of the structures, including the inflow/outflow tracts, are considered, the shape to be modelled becomes highly complex even in its static form. This makes the identification of dense correspondence within the training set a significant challenge. The purpose of this thesis is to develop a practical approach towards optimal statistical modelling and segmentation of dynamic 3D objects with complex topology. The method relies on harmonic embedding for establishing optimal global correspondence for a set of dynamic surfaces. We first demonstrate how it can be used for shapes whose topological realization is homeomorphic to a compact 2D manifold with boundary. A conformal harmonic map and tensor product B-splines are used to create a multi-resolution representation of the surfaces, which are re-parameterized by using hierarchical piecewise bilinear maps in a coarse-to-fine manner. The optimal global correspondence within the training shapes is identified by an objective function based on the principle of minimum description length. The strength of the method is demonstrated by building a concise yet physiologically plausible statistical shape model of the normal human left ventricle, whose principal modes of variation correspond to intrinsic cardiac motions. The proposed framework is then extended to dynamic shapes of higher genus. Criteria based on surface conformality and minimum description length are used to simultaneously identify the intrinsic global correspondence of the training data. The strength of the method is demonstrated by building a statistical model of the complex anatomical structure of the heart, which includes the atria, ventricles, aortic/pulmonary outflow tracts, pulmonary veins/arteries, and superior/inferior venae cavae. The analysis of variance and leave-one-out cross-validation indicate that the derived model not only captures physiologically plausible modes of variation but is also robust and concise, thus greatly enhancing its potential clinical value. With this thesis, we also demonstrate how the derived dynamic statistical shape model can be used for 4D cardiac image segmentation and combined MR/CFD haemodynamic modelling. (Open access.)
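As a rough illustration of the point-distribution component of such a statistical shape model (the harmonic-map and minimum-description-length correspondence steps are beyond a short sketch), the following Python sketch extracts principal modes of variation from already-corresponded surface points; all shape data here are random placeholders, not the thesis data.

    # Sketch: PCA shape model over corresponded 3D surface points (placeholder data).
    import numpy as np

    rng = np.random.default_rng(2)
    n_shapes, n_points = 20, 500
    mean_surface = rng.normal(size=(n_points, 3))
    modes_true = rng.normal(size=(2, n_points, 3))       # two synthetic "anatomical" modes
    weights = rng.normal(size=(n_shapes, 2))
    # Each training shape = mean + weighted modes + noise, flattened to one row.
    shapes = np.stack([
        (mean_surface + weights[i, 0] * modes_true[0] + weights[i, 1] * modes_true[1]
         + 0.01 * rng.normal(size=(n_points, 3))).ravel()
        for i in range(n_shapes)
    ])

    mean_shape = shapes.mean(axis=0)
    U, S, Vt = np.linalg.svd(shapes - mean_shape, full_matrices=False)
    var = S**2 / (n_shapes - 1)
    explained = var / var.sum()
    print("variance captured by the first two modes:", explained[:2].round(3))

    # Synthesise a new plausible instance from the first mode (+2 standard deviations).
    new_shape = (mean_shape + 2 * np.sqrt(var[0]) * Vt[0]).reshape(n_points, 3)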
Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111955746"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/111955746/Generate_an_adaptive_de_cubing_automatic_processing_for_laminated_object_manufacturing_LOM_"><img alt="Research paper thumbnail of Generate an adaptive de-cubing automatic processing for laminated object manufacturing (LOM)" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/111955746/Generate_an_adaptive_de_cubing_automatic_processing_for_laminated_object_manufacturing_LOM_">Generate an adaptive de-cubing automatic processing for laminated object manufacturing (LOM)</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">A de-cubing process are important in the Laminated Object Manufacturing (LOM) technique, creates ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">A de-cubing process are important in the Laminated Object Manufacturing (LOM) technique, creates a physical model directly from 3D CAD model without mold and dies by using laminated material. The de-cubing process is to assign shape of waste material into generally small square shape that can be easily remove in order to reduce time and avoid prototype damages. The adaptive de-cubing process applies proportion of number of black pixels on considering area per number of total pixels on considering area. If the proportion is more than a threshold, then the considering axis is divided. On the other hade, if the proportion is less than the threshold, then the considering axis is skipped. The adaptive process algorithm has been developed base on MATLAB platform. 
The result shown that the bigest threshold exploded the rough de-cubing and stair layer contour, while the smaller threshold produced the fine de-cubing and smooth layer contour.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111955746"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111955746"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111955746; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111955746]").text(description); $(".js-view-count[data-work-id=111955746]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111955746; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111955746']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=111955746]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111955746,"title":"Generate an adaptive de-cubing automatic processing for laminated object manufacturing (LOM)","internal_url":"https://www.academia.edu/111955746/Generate_an_adaptive_de_cubing_automatic_processing_for_laminated_object_manufacturing_LOM_","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111955745"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" 
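The threshold rule described in this abstract can be illustrated with a short sketch. The following Python code (the authors worked in MATLAB; this is only an assumed re-expression of the stated rule) recursively subdivides a layer bitmap whenever the proportion of part pixels in a region exceeds the threshold, and otherwise keeps the region as a single removable block; the bitmap, threshold, and minimum block size are placeholder values.

    # Sketch: adaptive de-cubing by recursive subdivision of a layer bitmap.
    # mask == 1 marks part (black) pixels; a region whose part-pixel ratio exceeds
    # `threshold` is split further, otherwise it is kept as one removable waste block.
    import numpy as np

    def decube(mask, x0, y0, x1, y1, threshold, min_size, cubes):
        region = mask[y0:y1, x0:x1]
        if region.size == 0:
            return
        ratio = region.mean()                       # proportion of part pixels in the region
        small = (x1 - x0) <= min_size or (y1 - y0) <= min_size
        if ratio <= threshold or small:
            cubes.append((x0, y0, x1, y1))          # keep as a single removable block
            return
        mx, my = (x0 + x1) // 2, (y0 + y1) // 2     # split both axes at the midpoint
        for (ax0, ay0, ax1, ay1) in [(x0, y0, mx, my), (mx, y0, x1, my),
                                     (x0, my, mx, y1), (mx, my, x1, y1)]:
            decube(mask, ax0, ay0, ax1, ay1, threshold, min_size, cubes)

    layer = np.zeros((128, 128), dtype=np.uint8)
    layer[32:96, 40:88] = 1                         # placeholder part cross-section
    blocks = []
    decube(layer, 0, 0, 128, 128, threshold=0.1, min_size=8, cubes=blocks)
    print(f"{len(blocks)} de-cubing blocks generated")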
href="https://www.academia.edu/111955745/Evaluation_of_the_First_Radiolabeled_99mTc_Jerusalem_Artichoke_Containing_Snack_Bar_on_Gastric_Emptying_and_Satiety_in_Healthy_Female_Volunteers"><img alt="Research paper thumbnail of Evaluation of the First Radiolabeled 99mTc-Jerusalem Artichoke-Containing Snack Bar on Gastric Emptying and Satiety in Healthy Female Volunteers" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/111955745/Evaluation_of_the_First_Radiolabeled_99mTc_Jerusalem_Artichoke_Containing_Snack_Bar_on_Gastric_Emptying_and_Satiety_in_Healthy_Female_Volunteers">Evaluation of the First Radiolabeled 99mTc-Jerusalem Artichoke-Containing Snack Bar on Gastric Emptying and Satiety in Healthy Female Volunteers</a></div><div class="wp-workCard_item"><span>Journal of the Medical Association of Thailand</span><span>, Apr 1, 2018</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111955745"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111955745"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111955745; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111955745]").text(description); $(".js-view-count[data-work-id=111955745]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111955745; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111955745']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=111955745]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111955745,"title":"Evaluation of the First Radiolabeled 99mTc-Jerusalem Artichoke-Containing Snack Bar on 
Gastric Emptying and Satiety in Healthy Female Volunteers","internal_url":"https://www.academia.edu/111955745/Evaluation_of_the_First_Radiolabeled_99mTc_Jerusalem_Artichoke_Containing_Snack_Bar_on_Gastric_Emptying_and_Satiety_in_Healthy_Female_Volunteers","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111955744"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/111955744/Land_use_and_land_cover_classification_from_satellite_images_based_on_ensemble_machine_learning_and_crowdsourcing_data_verification"><img alt="Research paper thumbnail of Land use and land cover classification from satellite images based on ensemble machine learning and crowdsourcing data verification" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/111955744/Land_use_and_land_cover_classification_from_satellite_images_based_on_ensemble_machine_learning_and_crowdsourcing_data_verification">Land use and land cover classification from satellite images based on ensemble machine learning and crowdsourcing data verification</a></div><div class="wp-workCard_item"><span>International Journal of Cartography</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111955744"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111955744"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111955744; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111955744]").text(description); $(".js-view-count[data-work-id=111955744]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111955744; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111955744']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); 
Wavelet-based Digital Image Watermarking by using Lorenz Chaotic Signal Localization
J. Inf. Process. Syst., 2019
Transmitting visual information over a broadcasting network is prone not only to copyright violation but also to forgery. Authenticating such information and protecting its authorship rights call for more advanced data encoding. To this end, electronic watermarking is often adopted to embed an inscriptive signature in imaging data. Most existing watermarking methods, while focusing on robustness against degradation, lack safeguards against a security loophole in which the encrypting scheme, once discovered, may be recreated by an unauthorized party. This could reveal the underlying signature, which may potentially be replaced or forged. This paper therefore proposes a novel digital watermarking scheme in the temporal-frequency domain. Unlike other typical wavelet-based watermarking, the proposed scheme employs the Lorenz chaotic map to specify embedding positions, effectively making it not only formidable to decrypt but also stronger against deterministic a...
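To make the embedding-position idea concrete, here is a minimal Python sketch (an assumed illustration, not the paper's scheme or parameters): a Lorenz trajectory generated from a key-like initial state selects which first-level wavelet coefficients receive the signature bits; the image, watermark, Lorenz parameters, and embedding strength are placeholders.

    # Sketch: choose wavelet-coefficient embedding positions from a Lorenz trajectory.
    import numpy as np
    import pywt

    def lorenz_sequence(n, state=(0.1, 0.0, 0.0), sigma=10.0, rho=28.0, beta=8.0 / 3, dt=0.01):
        """Euler-integrated Lorenz system; the initial state acts as the secret key."""
        x, y, z = state
        xs = np.empty(n)
        for i in range(n):
            x, y, z = (x + sigma * (y - x) * dt,
                       y + (x * (rho - z) - y) * dt,
                       z + (x * y - beta * z) * dt)
            xs[i] = x
        return xs

    rng = np.random.default_rng(3)
    image = rng.integers(0, 256, size=(128, 128)).astype(float)   # placeholder host image
    watermark = rng.integers(0, 2, size=64)                       # 64 signature bits

    cA, (cH, cV, cD) = pywt.dwt2(image, "haar")                   # one-level DWT
    flat = cH.ravel().copy()
    # Key-dependent positions: rank the chaotic samples and take the first 64 indices.
    positions = np.argsort(lorenz_sequence(flat.size))[:watermark.size]
    flat[positions] += 4.0 * (2 * watermark - 1)                  # +/- embedding strength 4.0
    cH = flat.reshape(cH.shape)

    watermarked = pywt.idwt2((cA, (cH, cV, cD)), "haar")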
Localization","internal_url":"https://www.academia.edu/111955743/Wavelet_based_Digital_Image_Watermarking_by_using_Lorenz_Chaotic_Signal_Localization","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111955737"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111955737/Eyewitnesses_Visual_Recollection_in_Suspect_Identification_by_using_Facial_Appearance_Model"><img alt="Research paper thumbnail of Eyewitnesses’ Visual Recollection in Suspect Identification by using Facial Appearance Model" class="work-thumbnail" src="https://attachments.academia-assets.com/109337273/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111955737/Eyewitnesses_Visual_Recollection_in_Suspect_Identification_by_using_Facial_Appearance_Model">Eyewitnesses’ Visual Recollection in Suspect Identification by using Facial Appearance Model</a></div><div class="wp-workCard_item"><span>Baghdad Science Journal</span><span>, Mar 1, 2020</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Facial recognition has been an active field of imaging science. With the recent progresses in com...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Facial recognition has been an active field of imaging science. With the recent progresses in computer vision development, it is extensively applied in various areas, especially in law enforcement and security. Human face is a viable biometric that could be effectively used in both identification and verification. Thus far, regardless of a facial model and relevant metrics employed, its main shortcoming is that it requires a facial image, against which comparison is made. Therefore, closed circuit televisions and a facial database are always needed in an operational system. For the last few decades, unfortunately, we have experienced an emergence of asymmetric warfare, where acts of terrorism are often committed in secluded area with no camera installed and possibly by persons whose photos have never been kept in any official database prior to the event. During subsequent investigations, the authorities thus had to rely on traumatized and frustrated witnesses, whose testimonial accounts regarding suspect&#39;s appearance are dubious and often misleading. To address this issue, this paper presents an application of a statistical appearance model of human face in assisting suspect identification based on witness&#39;s visual recollection. 
An online prototype system was implemented to demonstrate its core functionalities. Both visual and numerical assessments reported herein evidentially indicated potential benefits of the system for the intended purpose.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="2e9d2112e18a850c0afc11586b1e901d" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109337273,&quot;asset_id&quot;:111955737,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109337273/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111955737"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111955737"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111955737; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111955737]").text(description); $(".js-view-count[data-work-id=111955737]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111955737; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111955737']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "2e9d2112e18a850c0afc11586b1e901d" } } $('.js-work-strip[data-work-id=111955737]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111955737,"title":"Eyewitnesses’ Visual Recollection in Suspect Identification by using Facial Appearance Model","internal_url":"https://www.academia.edu/111955737/Eyewitnesses_Visual_Recollection_in_Suspect_Identification_by_using_Facial_Appearance_Model","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate 
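A minimal sketch of the underlying idea of a PCA-based facial appearance model follows; it is an assumed illustration, not the paper's system. A face is expressed as the mean face plus weighted appearance modes, so a witness-driven search amounts to adjusting the mode weights until the synthesized face matches recollection; the face gallery here is random placeholder data.

    # Sketch: PCA appearance model; a "witness" adjusts mode weights to synthesise a face.
    import numpy as np

    rng = np.random.default_rng(4)
    n_faces, h, w = 100, 32, 32                     # placeholder gallery of face images
    gallery = rng.random((n_faces, h * w))

    mean_face = gallery.mean(axis=0)
    U, S, Vt = np.linalg.svd(gallery - mean_face, full_matrices=False)
    modes = Vt[:10]                                 # first 10 appearance modes
    sigmas = S[:10] / np.sqrt(n_faces - 1)          # per-mode standard deviations

    def synthesise(weights):
        """Reconstruct a face from per-mode weights expressed in standard deviations."""
        return (mean_face + (np.asarray(weights) * sigmas) @ modes).reshape(h, w)

    # A witness nudges the first two modes; the rest stay at the mean.
    candidate = synthesise([1.5, -0.5, 0, 0, 0, 0, 0, 0, 0, 0])
    print(candidate.shape)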
Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":109337273,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109337273/thumbnails/1.jpg","file_name":"20378.pdf","download_url":"https://www.academia.edu/attachments/109337273/download_file","bulk_download_file_name":"Eyewitnesses_Visual_Recollection_in_Susp.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109337273/20378-libre.pdf?1703132817=\u0026response-content-disposition=attachment%3B+filename%3DEyewitnesses_Visual_Recollection_in_Susp.pdf\u0026Expires=1740157310\u0026Signature=Bzhyum1i2nSQKD0fjjx61jaiuOVHjCh2S2YRre7TJx1H4qQVf6uYjDpqMbEJW7akPBryKJSKLDfhiKt2q52a7dPtK-W3qhr1vr96eHNeQICSKj0AlkiK0d6pquONQnqlObySDP7T7j9zuQh9AcysyEZI~-phWKo8YyG9Za8IDsZplRA0bntkO--Xc9LR~esHEvckigyzOui8zkEmNf2Vmv-piEvdilRK-gl8XVLONXDyEkmwitLbWPyDOlFc6yUW-hrnWv1jocmRclDF2YspW6IQVFtP3lH8ViOGXbjDWCpHdiXtwtQXsAn30mlHE03B6kYdyWBw0ILAYQL6kiSOdg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="108183845"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/108183845/Fuzzy_logic_rate_adjustment_controls_using_a_circuit_breaker_for_persistent_congestion_in_wireless_sensor_networks"><img alt="Research paper thumbnail of Fuzzy logic rate adjustment controls using a circuit breaker for persistent congestion in wireless sensor networks" class="work-thumbnail" src="https://attachments.academia-assets.com/106634311/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/108183845/Fuzzy_logic_rate_adjustment_controls_using_a_circuit_breaker_for_persistent_congestion_in_wireless_sensor_networks">Fuzzy logic rate adjustment controls using a circuit breaker for persistent congestion in wireless sensor networks</a></div><div class="wp-workCard_item"><span>Wireless Networks</span><span>, Mar 4, 2020</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Congestion control is necessary for enhancing the quality of service in wireless sensor networks ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Congestion control is necessary for enhancing the quality of service in wireless sensor networks (WSNs). With advances in sensing technology, a substantial amount of data traversing a WSN can easily cause congestion, especially given limited resources. As a consequence, network throughput decreases due to significant packet loss and increased delays. Moreover, congestion not only adversely affects the data traffic and transmission success rate but also excessively dissipates energy, which in turn reduces the sensor node and, hence, network lifespans. A typical congestion control strategy was designed to address congestion due to transient events. 
However, on many occasions, congestion was caused by repeated anomalies and, as a consequence, persisted for an extended period. This paper thus proposes a congestion control strategy that can eliminate both types of congestion. The study adopted a fuzzy logic algorithm for resolving congestion in three key areas: optimal path selection, traffic rate adjustment that incorporates a momentum indicator, and an optimal timeout setting for a circuit breaker to limit persistent congestion. With fuzzy logic, decisions can be made efficiently based on probabilistic weights derived from fitness functions of congestion-relevant parameters. The simulation and experimental results reported herein demonstrate that the proposed strategy outperforms state-of-the-art strategies in terms of the traffic rate, transmission delay, queue utilization, and energy efficiency.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="62f34abb14d018091aaa4085ded4610c" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:106634311,&quot;asset_id&quot;:108183845,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/106634311/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="108183845"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="108183845"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 108183845; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=108183845]").text(description); $(".js-view-count[data-work-id=108183845]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 108183845; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='108183845']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "62f34abb14d018091aaa4085ded4610c" } } 
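The flavour of such a fuzzy rate controller can be sketched briefly. In the following Python sketch, triangular memberships over queue occupancy and its momentum (rate of change) are combined by weighted-average defuzzification into a rate-adjustment factor; the membership breakpoints and rule consequents are invented for illustration and are not the paper's rules.

    # Sketch: fuzzy rate adjustment from queue occupancy and its momentum (illustrative rules).
    import numpy as np

    def tri(x, a, b, c):
        """Triangular membership function peaking at b over [a, c]."""
        return np.maximum(0.0, np.minimum((x - a) / (b - a), (c - x) / (c - b)))

    def rate_factor(occupancy, momentum):
        # Fuzzify the inputs (occupancy in [0, 1], momentum in [-1, 1]).
        low = tri(occupancy, -0.4, 0.0, 0.5)
        med = tri(occupancy, 0.2, 0.5, 0.8)
        high = tri(occupancy, 0.5, 1.0, 1.4)
        falling = tri(momentum, -1.5, -1.0, 0.0)
        rising = tri(momentum, 0.0, 1.0, 1.5)
        # Rule consequents: multiply the current sending rate by these factors.
        rules = [(low, 1.2), (med, 1.0), (high, 0.6), (rising * high, 0.4), (falling * low, 1.3)]
        weights = np.array([w for w, _ in rules])
        outputs = np.array([o for _, o in rules])
        # Weighted-average defuzzification.
        return float((weights * outputs).sum() / (weights.sum() + 1e-9))

    print(rate_factor(occupancy=0.85, momentum=0.6))   # congested and worsening -> throttle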

A Geospatial Platform for Crowdsourcing Green Space Area Management Using GIS and Deep Learning Classification
ISPRS International Journal of Geo-Information, Mar 20, 2022
This article is an open access article distributed under the terms and conditions of the Creative Commons Attribution (CC BY) license.
Classification","internal_url":"https://www.academia.edu/108183844/A_Geospatial_Platform_for_Crowdsourcing_Green_Space_Area_Management_Using_GIS_and_Deep_Learning_Classification","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":106634291,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/106634291/thumbnails/1.jpg","file_name":"ijgi-11-00208-v2.pdf","download_url":"https://www.academia.edu/attachments/106634291/download_file","bulk_download_file_name":"A_Geospatial_Platform_for_Crowdsourcing.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/106634291/ijgi-11-00208-v2-libre.pdf?1697426248=\u0026response-content-disposition=attachment%3B+filename%3DA_Geospatial_Platform_for_Crowdsourcing.pdf\u0026Expires=1740157310\u0026Signature=IU0XIdM4cWymb5fgufM0vPzJgItdKYlZUaHOEMt2PANRxehAj6cy6ssVxhjsUaXHGC4QZE-M-OYx8qdWCFmHLQHA5BmB8JfBlLwV7Me0-LZ2YzI~WgyyLKZtR2Hu~jgSspw5MKzx0E5Il1fVFJRFOWv~bovJBneNIh1VpEEtMWYJ3pqDvkdIORIJ7CA7fnfFoAHJC8~H4daMDQwsmHym0oH9GIQC~DyFn5pqLcVspmONlNcx8UDJcu4vE3kYWNdKm4GS5DGO00AdSCKpko6Zq71cpATC8TaaET~9XVmeX2SS6mEJzNQBplkyfDX3sG4jofSxBUPzSEOFEwY1wKkU4g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":106634292,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/106634292/thumbnails/1.jpg","file_name":"ijgi-11-00208-v2.pdf","download_url":"https://www.academia.edu/attachments/106634292/download_file","bulk_download_file_name":"A_Geospatial_Platform_for_Crowdsourcing.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/106634292/ijgi-11-00208-v2-libre.pdf?1697426249=\u0026response-content-disposition=attachment%3B+filename%3DA_Geospatial_Platform_for_Crowdsourcing.pdf\u0026Expires=1740157310\u0026Signature=FqByvUUK7dGekUWsmVE4KvzdnnqYuw9oSNvN7oMT~WImFceNtHCQCst-SL5AtLsLWd6VCEYjnUwAKFZTXfXIzi7fg-T~5ddrc-~Ibk2-60lgcgMZe4rqz7tcb4BwIptfnmsv2vchzMcVrpmPr47Z2fbSKs2v9uHgww1JkTzzIym1l7LL3xkyOSJh5Klvw3mHwMzCXCkt2~47hHdm4suQ1yq-3Ttp81X~Q7vwXaW3sSg6jzHDeKg65CTXCIBnA3Qyyepe2iQk6XbVXVZmlxgsFQdgW1GDR9HtmtMw045DoHyyID5aKHfEoajpr-f3NfyB3Y1UX0ynTkF8pHvW~LbYgQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="108183842"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/108183842/Statistical_Shape_Modelling_of_the_Levator_Ani_with_Thickness_Variation"><img alt="Research paper thumbnail of Statistical Shape Modelling of the Levator Ani with Thickness Variation" class="work-thumbnail" src="https://attachments.academia-assets.com/106634298/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/108183842/Statistical_Shape_Modelling_of_the_Levator_Ani_with_Thickness_Variation">Statistical Shape Modelling of the Levator Ani with Thickness Variation</a></div><div 
class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2004</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The levator ani is vulnerable to injury during childbirth and effective surgical intervention req...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The levator ani is vulnerable to injury during childbirth and effective surgical intervention requires full knowledge of the morphology and mechanical properties of the muscle structure. The shape of the levator ani and regional thickening during different levels of physiological loading can provide an indication of pelvic floor dysfunction. This paper describes a coupled approach for shape and thickness statistical modelling based on harmonic embedding for volumetric analysis of the levator ani. With harmonic embedding, the dynamic information extracted by the statistical modelling captures shape and thickness variation of the levator ani across different subjects and during varying levels of stress. With this study, we demonstrate that the derived model is compact and physiologically meaningful, demonstrating the practical value of the technique.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="4bdab017b3112591d4bfe84d27b3d2ce" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:106634298,&quot;asset_id&quot;:108183842,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/106634298/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="108183842"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="108183842"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 108183842; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=108183842]").text(description); $(".js-view-count[data-work-id=108183842]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 108183842; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='108183842']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "4bdab017b3112591d4bfe84d27b3d2ce" } } $('.js-work-strip[data-work-id=108183842]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":108183842,"title":"Statistical Shape Modelling of the Levator Ani with Thickness Variation","internal_url":"https://www.academia.edu/108183842/Statistical_Shape_Modelling_of_the_Levator_Ani_with_Thickness_Variation","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":106634298,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/106634298/thumbnails/1.jpg","file_name":"10.1007_2F978-3-540-30135-6_32.pdf","download_url":"https://www.academia.edu/attachments/106634298/download_file","bulk_download_file_name":"Statistical_Shape_Modelling_of_the_Levat.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/106634298/10.1007_2F978-3-540-30135-6_32-libre.pdf?1697426234=\u0026response-content-disposition=attachment%3B+filename%3DStatistical_Shape_Modelling_of_the_Levat.pdf\u0026Expires=1740157310\u0026Signature=HgSRp3R1xSPb9uuN2fxOj073ztkZ8c8NdiMxxYJkBM517grz23bqiY8cZhxG-PN~kJC-mxAdOFz0ClRC92JYpg8HPL9ATFkrSM~dAc8d3CQxdsvwwdL0obYiAzBZta2acH03svyYSXw~4t3ldHvoqmOtsZPXmq9dtEw76Fp9imEVM7Xjf~yps~fSybjDSANN3Sj0-SBw6DbIvxfM8ZRY2e3TYfpCr3wnwXxggYGYYQqFTLWJzl7Gfj9itg-gR06jVCCmYLH1nWI58MW2Sn3p6htOT3vo9hvFrFdCoL9zZWUnq5PZSUv8bTT9OLzhQL1GTmkUFjvY2H5EyCXI~hC7mA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="92623148"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/92623148/Secure_and_Robust_Image_Watermarking_using_Discrete_Wavelet_and_Arnold_Transforms"><img alt="Research paper thumbnail of Secure and Robust Image Watermarking using Discrete Wavelet and Arnold Transforms" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/92623148/Secure_and_Robust_Image_Watermarking_using_Discrete_Wavelet_and_Arnold_Transforms">Secure and Robust Image Watermarking using Discrete Wavelet and Arnold Transforms</a></div><div class="wp-workCard_item"><span>2022 37th International Technical Conference on Circuits/Systems, Computers and Communications (ITC-CSCC)</span></div><div class="wp-workCard_item 
wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="92623148"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="92623148"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 92623148; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=92623148]").text(description); $(".js-view-count[data-work-id=92623148]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 92623148; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='92623148']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=92623148]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":92623148,"title":"Secure and Robust Image Watermarking using Discrete Wavelet and Arnold Transforms","internal_url":"https://www.academia.edu/92623148/Secure_and_Robust_Image_Watermarking_using_Discrete_Wavelet_and_Arnold_Transforms","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="92623147"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/92623147/Prevalence_of_depression_and_stress_among_the_first_year_students_in_Suranaree_University_of_Technology_Thailand"><img alt="Research paper thumbnail of Prevalence of depression and stress among the first year students in Suranaree University of Technology, Thailand" class="work-thumbnail" 
src="https://attachments.academia-assets.com/95583054/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/92623147/Prevalence_of_depression_and_stress_among_the_first_year_students_in_Suranaree_University_of_Technology_Thailand">Prevalence of depression and stress among the first year students in Suranaree University of Technology, Thailand</a></div><div class="wp-workCard_item"><span>Health Psychology Research</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The objectives of this study were to evaluate the level of depression and stress among the first-...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The objectives of this study were to evaluate the level of depression and stress among the first-year students at Suranaree University of Technology (SUT) and to compare the level of depression and stress among the samples, classified by demographic factors, including gender, domicile, and problem. This research has been approved by the SUT’s Research Ethics Committee. The study period was between July and August 2018. The online, self-report questionnaire was used as a research instrument to collect data from the sample of SUT first-year students. The total number of first-year students at SUT was 3,552 and the response rate was 65.15%. The major findings revealed that 7.0% and 51.1% of them were suffering from depression, and pathological stress, respectively. In addition, the prevalence of depression and pathological stress was higher in female samples than in other gender groups. 
The findings would suggest that related activities should be organized to promote students’ awarenes...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="bb7607d64ca8fcbfe592a0777f8ac7d9" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:95583054,&quot;asset_id&quot;:92623147,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/95583054/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="92623147"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="92623147"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 92623147; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=92623147]").text(description); $(".js-view-count[data-work-id=92623147]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 92623147; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='92623147']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "bb7607d64ca8fcbfe592a0777f8ac7d9" } } $('.js-work-strip[data-work-id=92623147]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":92623147,"title":"Prevalence of depression and stress among the first year students in Suranaree University of Technology, Thailand","internal_url":"https://www.academia.edu/92623147/Prevalence_of_depression_and_stress_among_the_first_year_students_in_Suranaree_University_of_Technology_Thailand","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate 
Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":95583054,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/95583054/thumbnails/1.jpg","file_name":"35464-prevalence-of-depression-and-stress-among-the-first-year-students-in-suranaree-university-of-technology-thailand.pdf__filename_UTF.pdf","download_url":"https://www.academia.edu/attachments/95583054/download_file","bulk_download_file_name":"Prevalence_of_depression_and_stress_amon.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/95583054/35464-prevalence-of-depression-and-stress-among-the-first-year-students-in-suranaree-university-of-technology-thailand.pdf__filename_UTF-libre.pdf?1670775948=\u0026response-content-disposition=attachment%3B+filename%3DPrevalence_of_depression_and_stress_amon.pdf\u0026Expires=1740157310\u0026Signature=aq9HhC~XuWdRxQ2DqubrQpIYm1dpkvh6IX4TlWwa77mtsRmDzsHn3dONtfX2fL4MMpQ2Th3gPaJrV~WDQ2GnngsSL2FysjnnG~hZXx6YXtAzrnhR19GRWZEacK8Whd1rn8oS8GL1Z-9pI-eLjjVTC~ghOTpgQOUqOSL2J8qj9sfT24A5dvgDjF2BXhpGH~P3N8temLOGI3JHwhorTN1kdguzAHSJHYSIiYHeKcnBnvPsZtuUcrgma8n~1xmiBcF82O7ppjKDpmFk215x68lakGeBbDn8ZvvmvPixRbv-6G7yK91lUPzziwD6WVlcl5SKW0u42ISXubp9g8k7l~VTww__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="92623132"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/92623132/Forecast_Coral_Bleaching_by_Machine_Learnings_of_Remotely_Sensed_Geospatial_Data"><img alt="Research paper thumbnail of Forecast Coral Bleaching by Machine Learnings of Remotely Sensed Geospatial Data" class="work-thumbnail" src="https://attachments.academia-assets.com/95583046/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/92623132/Forecast_Coral_Bleaching_by_Machine_Learnings_of_Remotely_Sensed_Geospatial_Data">Forecast Coral Bleaching by Machine Learnings of Remotely Sensed Geospatial Data</a></div><div class="wp-workCard_item"><span>International Journal of Design &amp;amp; Nature and Ecodynamics</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">With the rapid changes in Earth climates, coral bleaching has been spreading worldwide and gettin...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">With the rapid changes in Earth climates, coral bleaching has been spreading worldwide and getting much severe. It is considered an imminent threat to marine animals as well as causing adverse impacts on fisheries and tourisms. Environmental agencies in affected regions have been made aware of the problem and hence starting to contain coral bleaching. Thus far, they often rely on conventional site survey to determine suitable sites to intervene and commence coral reef reviving process. 
With the recent advances in remote sensing technology, sea surface temperature (SST), acquired by satellites, has become a viable delegate to coral bleaching. Predicting coral bleaching based solely on SST is limited, as it is only one of many determinants. In addition, areas with different SST levels also exhibit different bleaching characteristics. Hence, area specific models are important for appropriately monitoring the events. Thus far, forecasting the bleaching based on SST alone has limited acc...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="2378e894f5ea253f6868664d178f135c" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:95583046,&quot;asset_id&quot;:92623132,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/95583046/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="92623132"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="92623132"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 92623132; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=92623132]").text(description); $(".js-view-count[data-work-id=92623132]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 92623132; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='92623132']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "2378e894f5ea253f6868664d178f135c" } } $('.js-work-strip[data-work-id=92623132]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":92623132,"title":"Forecast Coral Bleaching by Machine Learnings of Remotely Sensed Geospatial 
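As a rough illustration of the kind of area-specific, SST-driven forecasting described above, the sketch below trains one classifier per region on hypothetical SST-derived features. The feature names, labels, region names, and the scikit-learn model choice are assumptions for illustration only, not the paper's configuration or data.

```python
# Area-specific bleaching forecast sketch (illustrative only; features, labels,
# and model choice are assumptions, not the published configuration).
import numpy as np
from sklearn.ensemble import RandomForestClassifier

rng = np.random.default_rng(1)

def make_region_data(n=200):
    """Hypothetical per-week features: mean SST, SST anomaly, degree heating weeks."""
    X = np.column_stack([
        rng.normal(29.0, 1.0, n),    # mean SST (deg C)
        rng.normal(0.0, 0.8, n),     # SST anomaly
        rng.gamma(2.0, 1.0, n),      # degree heating weeks
    ])
    y = (X[:, 2] > 4.0).astype(int)  # toy label: bleaching if DHW exceeds 4
    return X, y

# One model per region, since regions bleach under different SST regimes.
regions = ["region_a", "region_b"]   # hypothetical area identifiers
models = {}
for region in regions:
    X, y = make_region_data()
    models[region] = RandomForestClassifier(n_estimators=100, random_state=0).fit(X, y)

# Forecast for one new observation in a given region.
print(models["region_a"].predict_proba([[30.1, 1.2, 5.5]]))
```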
Data","internal_url":"https://www.academia.edu/92623132/Forecast_Coral_Bleaching_by_Machine_Learnings_of_Remotely_Sensed_Geospatial_Data","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":95583046,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/95583046/thumbnails/1.jpg","file_name":"17.03_13.pdf","download_url":"https://www.academia.edu/attachments/95583046/download_file","bulk_download_file_name":"Forecast_Coral_Bleaching_by_Machine_Lear.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/95583046/17.03_13-libre.pdf?1670775971=\u0026response-content-disposition=attachment%3B+filename%3DForecast_Coral_Bleaching_by_Machine_Lear.pdf\u0026Expires=1740157310\u0026Signature=V16bZ50bdOgnrCXH5UuiXsJ4nB58p20F-qF1JE2yQpwYkHTORSt63dcDd4yFuJueA8DfgdyvaoQlgFr8EQv-0WnlJ~IuKnfWP6xBLBWogpJbH8yu4Y3OOwrUiiN1Ma5c8lWHru7fmItn3CtOWv~q0LYmg2BV5eQVZpPFy5Cww~nh0hDYg~Irc~8NGbW3JBSdyr5UG7tsawTHaPq~eEHuy07JTVm0vEDZxT1y2jt7XlQHRfK1Q-iFeTb2aZJm0YLLR1dl2C6ib~kDztSsLmPeCqHh8JzcTP7tII0ce-V~WLEGQlu0TafBaR5c-HEMazUa-INOJhlR4TWv0gkvbTLANA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="86305215"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/86305215/Multimodal_Fusion_of_Deeply_Inferred_Point_Clouds_for_3D_Scene_Reconstruction_Using_Cross_Entropy_ICP"><img alt="Research paper thumbnail of Multimodal Fusion of Deeply Inferred Point Clouds for 3D Scene Reconstruction Using Cross-Entropy ICP" class="work-thumbnail" src="https://attachments.academia-assets.com/90786958/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/86305215/Multimodal_Fusion_of_Deeply_Inferred_Point_Clouds_for_3D_Scene_Reconstruction_Using_Cross_Entropy_ICP">Multimodal Fusion of Deeply Inferred Point Clouds for 3D Scene Reconstruction Using Cross-Entropy ICP</a></div><div class="wp-workCard_item"><span>IEEE Access</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Depth estimation is a crucial step toward 3D scene understanding. Most traditional systems rely o...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Depth estimation is a crucial step toward 3D scene understanding. Most traditional systems rely on direct sensing of this information by means of photogrammetry or on stereo imaging. As the scenes getting more complex, these modalities were impeded by, for instances, occlusion and imperfect lighting condition, etc. 
As a consequence, reconstructed surfaces are normally left with voids, due to missing data. Therefore, surface regularization is often required as post-processing. With the recent advances in deep learning, depth inference from a monocular image has attracted considerable interests. Many convolutional architectures have been proposed to infer depth information from a monocular image, with promising results. Thus far, visual cues learned and generalized by these networks may be ambiguous, resulting in inaccurate estimation. To address these issues, this paper presents an effective method for fusing point clouds extracted from depth values, directly measured by an infrared camera and estimated by a modified ResNet-50 from an RGB image, of the same scene. To ensure robustness and efficiency of finding the correspondence between and aligning these point clouds, an information theoretic alignment strategy, called CEICP, was proposed. The experimental results on a public dataset demonstrated that the proposed method outperformed its counterparts, while producing good quality surface renditions of the underlying scene.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="b8076d47800822491b50c85845b48dd8" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:90786958,&quot;asset_id&quot;:86305215,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/90786958/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="86305215"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="86305215"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 86305215; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=86305215]").text(description); $(".js-view-count[data-work-id=86305215]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 86305215; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='86305215']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if 
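To make the alignment step concrete, the sketch below shows plain nearest-neighbour, point-to-point ICP between two point clouds. This is only the conventional baseline that CEICP builds upon; the paper's cross-entropy correspondence weighting is not reproduced here.

```python
# Plain point-to-point ICP (baseline illustration; this is standard ICP,
# not the paper's cross-entropy variant).
import numpy as np
from scipy.spatial import cKDTree

def icp(source, target, iters=30):
    """Rigidly align source (N,3) to target (M,3); returns R, t, aligned source."""
    src = source.copy()
    R_total, t_total = np.eye(3), np.zeros(3)
    tree = cKDTree(target)
    for _ in range(iters):
        _, idx = tree.query(src)                 # nearest-neighbour correspondences
        matched = target[idx]
        mu_s, mu_t = src.mean(axis=0), matched.mean(axis=0)
        H = (src - mu_s).T @ (matched - mu_t)    # cross-covariance (Kabsch)
        U, _, Vt = np.linalg.svd(H)
        R = Vt.T @ U.T
        if np.linalg.det(R) < 0:                 # avoid reflections
            Vt[-1] *= -1
            R = Vt.T @ U.T
        t = mu_t - R @ mu_s
        src = src @ R.T + t                      # apply the incremental transform
        R_total, t_total = R @ R_total, R @ t_total + t
    return R_total, t_total, src

# Toy usage: align a rotated, shifted copy of a random cloud back onto the original.
rng = np.random.default_rng(2)
target = rng.normal(size=(500, 3))
angle = np.deg2rad(10)
Rz = np.array([[np.cos(angle), -np.sin(angle), 0],
               [np.sin(angle),  np.cos(angle), 0],
               [0, 0, 1]])
source = target @ Rz.T + np.array([0.1, -0.2, 0.05])
R, t, aligned = icp(source, target)
```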

JavaScript 3D graphics library for agricultural geographic information system
wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="86305168"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 86305168; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=86305168]").text(description); $(".js-view-count[data-work-id=86305168]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 86305168; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='86305168']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=86305168]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":86305168,"title":"JavaScript 3D graphics library for agricultural geographic information system","internal_url":"https://www.academia.edu/86305168/JavaScript_3D_graphics_library_for_agricultural_geographic_information_system","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="83191106"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/83191106/Urban_areas_extraction_from_multi_sensor_data_based_on_machine_learning_and_data_fusion"><img alt="Research paper thumbnail of Urban areas extraction from multi sensor data based on machine learning and data fusion" class="work-thumbnail" src="https://attachments.academia-assets.com/88622171/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/83191106/Urban_areas_extraction_from_multi_sensor_data_based_on_machine_learning_and_data_fusion">Urban areas extraction from multi sensor data based on machine learning and data fusion</a></div><div 
class="wp-workCard_item"><span>Pattern Recognition and Image Analysis</span><span>, 2017</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Accurate urban areas information is important for a variety of applications, especially city plan...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Accurate urban areas information is important for a variety of applications, especially city planning and natural disaster prediction and management. In recent years, extraction of urban structures from remotely sensed images has been extensively explored. The key advantages of this imaging modality are reduction of surveying expense and time. It also elevates restrictions on ground surveys. Thus far, much research typically extracts these structures from very high resolution satellite imagery, which are unfortunately of relatively poor spectral resolution, resulting in good precision yet moderate accuracy. Therefore, this paper investigates extraction of buildings from middle and high resolution satellite images by using spectral indices</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="e6589146b20ec34ed1a73dbfe254d87b" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:88622171,&quot;asset_id&quot;:83191106,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/88622171/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="83191106"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="83191106"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 83191106; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=83191106]").text(description); $(".js-view-count[data-work-id=83191106]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 83191106; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='83191106']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "e6589146b20ec34ed1a73dbfe254d87b" } } $('.js-work-strip[data-work-id=83191106]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":83191106,"title":"Urban areas extraction from multi sensor data based on machine learning and data fusion","internal_url":"https://www.academia.edu/83191106/Urban_areas_extraction_from_multi_sensor_data_based_on_machine_learning_and_data_fusion","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":88622171,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/88622171/thumbnails/1.jpg","file_name":"S105466181604013120220715-1-1m1fot1.pdf","download_url":"https://www.academia.edu/attachments/88622171/download_file","bulk_download_file_name":"Urban_areas_extraction_from_multi_sensor.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/88622171/S105466181604013120220715-1-1m1fot1-libre.pdf?1657889263=\u0026response-content-disposition=attachment%3B+filename%3DUrban_areas_extraction_from_multi_sensor.pdf\u0026Expires=1740157310\u0026Signature=RN46vYN2HysMNHSymXIeTFyTQICa41V4f7QSgdFFpHzL6MS0qdT2mFR0V7ZwVigZkky2cBZYhevQi9dJrHrOrXR7XuATptzC~1UqayeDxa2fy-ARA~iWc5UySTL15~t2037TpeMunsG7yal5sxhcM0-3RkKqtbaBvrOU4t-dOvOrpVwzHHlKmDqtPOCo~KPIhxEw3-RuhnXPANH4JUhV6cW6Vvjjw44BIKiiCiiKr4Kz4pc4Te3u31P1FZqhGUtd1cRoEbGJIMwFLH7LztSSli1kWPwRIae~UzH7XKTxxa5HSItLsQZgn-JY5kLdK0YXrqTvMVsAtGFJXKiE45KvJQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="78215533"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/78215533/DWT_MFCC_Feature_Extraction_for_Tile_Tapping_Sound_Classification"><img alt="Research paper thumbnail of DWT/ MFCC Feature Extraction for Tile Tapping Sound Classification" class="work-thumbnail" src="https://attachments.academia-assets.com/85340237/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/78215533/DWT_MFCC_Feature_Extraction_for_Tile_Tapping_Sound_Classification">DWT/ MFCC Feature Extraction for Tile Tapping Sound Classification</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Tile tapping sound inspection is a process of construction quality control. 
Hollow sound, for ins...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Tile tapping sound inspection is a process of construction quality control. Hollow sound, for instance, indicate low quality tessellation and thus voids underneath that could lead to future broken tiles. Hollow-sounding inspection was often carried out by construction specialists, whose skills and judgment may vary across individual. This paper elevates this issue and presents a Deep Learning (DL) classification method for computerized sounding tile inspection. Unlike other existing works in the area, where structural details were assessed, this study acquired tapping sound signals and analyzed them in a spectral domain by using Discrete Wavelet Transform (DWT) and Mel-frequency Cepstral Coefficients (MFCC). The dull versus hollow sounding tile were then classified based on these features by means of a Convolutional Neural Network (CNN). The experiments carried out in a laboratory tessellation indicated that the proposed method could differentiate dull from hollow-sounding tiles wit...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="80336f8904150c957c680a299770dd48" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:85340237,&quot;asset_id&quot;:78215533,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/85340237/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="78215533"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="78215533"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 78215533; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=78215533]").text(description); $(".js-view-count[data-work-id=78215533]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 78215533; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='78215533']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
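The pipeline described above (spectral features from a tapping signal, then a CNN) can be sketched as follows, assuming librosa for MFCC extraction and a tiny PyTorch network; only the MFCC branch is shown, and the layer sizes, sampling rate, and coefficient counts are illustrative assumptions rather than the paper's settings.

```python
# MFCC + small CNN sketch for tapping-sound classification (illustrative;
# network size and MFCC settings are assumptions, not the paper's).
import numpy as np
import librosa
import torch
import torch.nn as nn

def mfcc_image(signal, sr=22050, n_mfcc=20):
    """Return an MFCC 'image' of shape (1, n_mfcc, frames) for a 2D CNN."""
    m = librosa.feature.mfcc(y=signal, sr=sr, n_mfcc=n_mfcc)
    m = (m - m.mean()) / (m.std() + 1e-8)        # simple per-sample normalization
    return torch.from_numpy(m).float().unsqueeze(0)

class TapNet(nn.Module):
    """Tiny CNN that maps an MFCC image to dull (0) vs hollow (1) logits."""
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 8, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.Conv2d(8, 16, 3, padding=1), nn.ReLU(),
            nn.AdaptiveAvgPool2d(1),
        )
        self.classifier = nn.Linear(16, 2)

    def forward(self, x):
        return self.classifier(self.features(x).flatten(1))

# Toy usage: one fake 0.5 s tapping signal -> class logits.
signal = np.random.randn(11025).astype(np.float32)
x = mfcc_image(signal).unsqueeze(0)              # (batch=1, 1, n_mfcc, frames)
logits = TapNet()(x)
print(logits.shape)                              # torch.Size([1, 2])
```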
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "80336f8904150c957c680a299770dd48" } } $('.js-work-strip[data-work-id=78215533]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":78215533,"title":"DWT/ MFCC Feature Extraction for Tile Tapping Sound Classification","internal_url":"https://www.academia.edu/78215533/DWT_MFCC_Feature_Extraction_for_Tile_Tapping_Sound_Classification","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":85340237,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/85340237/thumbnails/1.jpg","file_name":"480424909.pdf","download_url":"https://www.academia.edu/attachments/85340237/download_file","bulk_download_file_name":"DWT_MFCC_Feature_Extraction_for_Tile_Tap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/85340237/480424909-libre.pdf?1651495728=\u0026response-content-disposition=attachment%3B+filename%3DDWT_MFCC_Feature_Extraction_for_Tile_Tap.pdf\u0026Expires=1740157310\u0026Signature=fPZ95x0BtrAcv8-tBinJh2Os7Op0Gg8ras8KXsW8HUp-GteIQTTw~B~fDR8~DsU184~HJZ8xyX6IJDSoCx1ZInJKUumxXpO4g-Zays1ftL-vutdHjWcmeNVKGVL5~6qdYd-3VgledACBkDc9gGjeZr0WWhWkhn7pmsl6GUiyMGchomIGHGn0FeamuIFRPw5wFcyE33hkp6f0Vf4iv1r3D43vZ4bRRmfWUXYkRosFqCJleOv2m~fWNvCLn9Kjwzsrtp6kfsQEoTKAJngKhS9lwq4NM0i8jT2ekQo8rf2PTHLWeTpIHe8QwSCr7MC4G4VxQ~GnfBh8tW-YoepitsZDXA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="11401106" id="papers"><div class="js-work-strip profile--work_container" data-work-id="125528953"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/125528953/Non_Destructive_Inspection_of_Tile_Debonding_by_DWT_and_MFCC_of_Tile_Tapping_Sound_with_Machine_versus_Deep_Learning_Models"><img alt="Research paper thumbnail of Non-Destructive Inspection of Tile Debonding by DWT and MFCC of Tile-Tapping Sound with Machine versus Deep Learning Models" class="work-thumbnail" src="https://attachments.academia-assets.com/119555165/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/125528953/Non_Destructive_Inspection_of_Tile_Debonding_by_DWT_and_MFCC_of_Tile_Tapping_Sound_with_Machine_versus_Deep_Learning_Models">Non-Destructive Inspection of Tile Debonding by DWT and MFCC of Tile-Tapping Sound with Machine versus Deep 
Non-Destructive Inspection of Tile Debonding by DWT and MFCC of Tile-Tapping Sound with Machine versus Deep Learning Models
ECTI Transactions on Computer and Information Technology, Jan 20, 2024
https://www.academia.edu/125528953/Non_Destructive_Inspection_of_Tile_Debonding_by_DWT_and_MFCC_of_Tile_Tapping_Sound_with_Machine_versus_Deep_Learning_Models
One of the essential processes of construction quality control is tile bonding inspection. Hollows beneath a tile tessellation can lead to unbonded or completely broken tiles. An interior inspector typically uses a hollow-sounding technique; however, it relies on skill and judgment that vary greatly among individuals. Moreover, the equipment and its interpretation are difficult to calibrate and standardize. This paper addresses these issues by employing machine-learning strategies for tile-tapping sound classification. Provided that a tapping signal was digitally acquired, the proposed method was fully computerized. Firstly, the signal was analyzed and its wavelets and MFCC were extracted. The corresponding spectral features were then classified by SVM, k-NN, Naïve Bayes, and Logistic Regression algorithms, in turn. The results were subsequently compared against those from a previous work that employed a deep learning strategy. It was revealed that when the proposed method was properly configured, it required much less computing resource than the deep learning based one, while being able to distinguish dull from hollow-sounding tiles with 93.67% accuracy.
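A minimal sketch of the comparison this abstract describes, i.e. running the four named classical classifiers on a precomputed spectral feature matrix, might look as follows. scikit-learn, the pipeline layout, the feature matrix X (rows = recordings, columns = DWT/MFCC features), and the hyperparameters are illustrative assumptions, not the paper's actual configuration.

```python
# Hypothetical sketch: cross-validated comparison of SVM, k-NN, Naive Bayes, and Logistic Regression.
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression

def compare_classifiers(X, y, folds=5):
    models = {
        "SVM": SVC(kernel="rbf"),
        "k-NN": KNeighborsClassifier(n_neighbors=5),
        "Naive Bayes": GaussianNB(),
        "Logistic Regression": LogisticRegression(max_iter=1000),
    }
    scores = {}
    for name, clf in models.items():
        # Standardize features before fitting, then score by k-fold cross-validation
        pipe = make_pipeline(StandardScaler(), clf)
        scores[name] = cross_val_score(pipe, X, y, cv=folds).mean()
    return scores
```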
4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="125528951"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/125528951/A_Geospatial_Donation_Platform_for_COVID_19_and_Beyond_Leveraging_Location_Based_Services_and_Geofencing"><img alt="Research paper thumbnail of A Geospatial Donation Platform for COVID-19 and Beyond, Leveraging Location – Based Services and Geofencing" class="work-thumbnail" src="https://attachments.academia-assets.com/119555160/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/125528951/A_Geospatial_Donation_Platform_for_COVID_19_and_Beyond_Leveraging_Location_Based_Services_and_Geofencing">A Geospatial Donation Platform for COVID-19 and Beyond, Leveraging Location – Based Services and Geofencing</a></div><div class="wp-workCard_item"><span>TEM Journal</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The current global scenario is characterized by epidemics and various types of disasters, severel...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The current global scenario is characterized by epidemics and various types of disasters, severely impacting communities&amp;#39; health, living conditions, and economic stability. Especially during such crises, the requirement for essential necessities becomes critical. Existing solution guidelines involve receiving donated items from public agencies through an offline system to provide assistance to victims. However, this implementation faces several limitations, such as a lack of understanding of people&amp;#39;s needs in specific areas, leading to mismatches between assistance and actual requirements. Additionally, donators lack adequate information, resulting in further discrepancies between donated items and the victims&amp;#39; genuine needs. The available geospatial platforms primarily support surveillance and monitoring of epidemic or disaster situations but fail to address the management of needs related to donation and receipt. 
Through an extensive review of the literature and relate...
Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":119555160,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/119555160/thumbnails/1.jpg","file_name":"TEMJournalAugust2023_1902_1913.pdf","download_url":"https://www.academia.edu/attachments/119555160/download_file","bulk_download_file_name":"A_Geospatial_Donation_Platform_for_COVID.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/119555160/TEMJournalAugust2023_1902_1913-libre.pdf?1731564864=\u0026response-content-disposition=attachment%3B+filename%3DA_Geospatial_Donation_Platform_for_COVID.pdf\u0026Expires=1740106380\u0026Signature=VpjfeNRn1DArP6vUvtK5-IBsSBFDb5PPKBEPRi1FFnaaMTDP5I8g3MwUa58nDLI8eZYssmAjK4X~gSBb2hgUPfhP8eddWM9o9ugbKn3aZpjsm7xdm4INqFTZwv9I~VIPKx0wBs1Cj0ucK7BynN2esxUpbA4Se5uO8JTDIouZEj9Yzsq1fW6WzGYppSZgmYkw8L9dz0Vj-r4ylTl3jP1lkA3dBaBr7B3dK31qVhvNWqpcv~~kElWm5v4MEweDaF-5qbOzFYVrSXZZz-eZcVcTp5rOv6lLoGAaywAiJ9gK6f0ppppJ~Ub95tiAeyFVQP2tY4z-O7fQHRn3HQIxIaiH1g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":119555161,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/119555161/thumbnails/1.jpg","file_name":"TEMJournalAugust2023_1902_1913.pdf","download_url":"https://www.academia.edu/attachments/119555161/download_file","bulk_download_file_name":"A_Geospatial_Donation_Platform_for_COVID.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/119555161/TEMJournalAugust2023_1902_1913-libre.pdf?1731564862=\u0026response-content-disposition=attachment%3B+filename%3DA_Geospatial_Donation_Platform_for_COVID.pdf\u0026Expires=1740106380\u0026Signature=T0gwgGv1xrMgCTi--n2ELPoIIucseTISMaVkwn4ob~T4-WfMN4QAEEpEPs~X11jnuGS41CoMwgyF4mthKJskc0xPyZ57jxZRGeYK52qlN~93zai94mfi~6lckvKXYKlQpBBHSr5QIAG1ZYiYtABrAKXpsJP5pzNLYQuLB6KdcRG5HHOGz9q5UGnjxZe44VOBheNuAIpjAyBsVQRBgqW4TfrRQEOybXEMhS~AMAoSg~8KiV~EsglLf4u0Ni1cwfk9UGozwG2jhUydGK7fmWIXKzDa4UlHSyB-7Br1QuzusOXc2m03JA6V3i~aridgsoQ3rUt9ac~SaFGf9DMOLzY2MA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="116363517"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/116363517/Flood_Warning_and_Management_Schemes_with_Drone_Emulator_Using_Ultrasonic_and_Image_Processing"><img alt="Research paper thumbnail of Flood Warning and Management Schemes with Drone Emulator Using Ultrasonic and Image Processing" class="work-thumbnail" src="https://attachments.academia-assets.com/112514988/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/116363517/Flood_Warning_and_Management_Schemes_with_Drone_Emulator_Using_Ultrasonic_and_Image_Processing">Flood Warning and Management Schemes with Drone Emulator Using Ultrasonic and Image Processing</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/BoonchooSrikudkao">Boonchoo Srikudkao</a> and <a class="" data-click-track="profile-work-strip-authors" 
href="https://biotech.academia.edu/ParamateHorkaew">Paramate Horkaew</a></span></div><div class="wp-workCard_item"><span>Advances in Intelligent Systems and Computing</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The objective of this paper is to assess the feasibility of an alternative approach to collect wa...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The objective of this paper is to assess the feasibility of an alternative approach to collect water information relating to flooding crisis by means of a small drone. This information includes aerial images, their geographic locations, water flow velocity and its direction, all of which are normally difficult to obtain and in fact expensive should a conventional helicopter or buoyancy are opted. With a drone, however, these acquisitions can be done by a minimally trained operator and under controlled budget. This paper presents the breakout configuration and integration of various sensors and their data management scheme based on a series of image processing techniques, emulating the tasks required to estimate the key flood related parameters. The experimental results reported herein could provide a basis for determining its potential applications in flood warning and predicting systems, as well as concerns that need to be addressed.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="5ee6b70f9c84a87c58d385ef74ed1ab6" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:112514988,&quot;asset_id&quot;:116363517,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/112514988/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="116363517"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="116363517"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 116363517; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=116363517]").text(description); $(".js-view-count[data-work-id=116363517]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 116363517; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='116363517']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); 
Factors Contributing to Students Engagement: A Case Study at the Institute of Medicine at SUT
https://www.academia.edu/111955748/Factors_Contributing_to_Students_Engagement_A_Case_Study_at_the_Institute_of_Medicine_at_SUT
In higher education, much attention has been focused on enhancing the educational experience, allowing students to develop successfully and thus make the most of not only their potential but also the numerous other benefits education has to offer. Being engaged both institutionally and academically plays a vital part in developing their potential and performance. Therefore, this paper studied the level of engagement with academics at the Institute of Medicine. Factors contributing to institutional engagement were also analyzed. The participants included 229 medical students. Each participant was asked to answer a general demographic questionnaire, the Institute engagement questionnaire, the Utrecht Work Engagement Scale-Student version (UWES-S) questionnaire, and a questionnaire covering all relevant factors. Student engagement was assessed through statistical analysis, including percentage, mean, standard deviation, and stepwise multiple regression of the constituent factors. The Institute engagement level was 3.73. Factors that significantly pertained to the engagement level were teachers (p = 0.01*), staff (p = 0.01*), friends (p = 0.02*), and senior peers (p = 0.03*), respectively. Academic engagement was found to vary by level of study. Medical students in their 1st, 2nd, and 3rd years exhibited engagement levels of 4.94, 4.87, and 4.55, respectively. For this group, students' engagement toward the university was at a high level. The most important contributing factors were their relationships with teachers, staff, friends, and senior peers. However, the academic engagement level tended to decrease as study progressed. It was conjectured that this notable decrease resulted from the increasing complexity of the program as specified by the curriculum. Positively engaged students adapt better to the academic context of higher education.
Hence, they are much more likely to succeed.
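For the stepwise multiple regression mentioned in this abstract, a minimal forward-selection sketch could look like the following. scikit-learn, the factor column names, and the number of selected features are illustrative assumptions rather than the study's actual procedure.

```python
# Hypothetical sketch: forward stepwise selection of factors predicting an engagement score.
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.feature_selection import SequentialFeatureSelector

def stepwise_engagement_model(df: pd.DataFrame, factors, target="engagement", k=4):
    X, y = df[factors], df[target]
    # Add one factor at a time, keeping the subset that best predicts the target under CV
    selector = SequentialFeatureSelector(
        LinearRegression(), n_features_to_select=k, direction="forward", cv=5
    )
    selector.fit(X, y)
    chosen = [f for f, keep in zip(factors, selector.get_support()) if keep]
    model = LinearRegression().fit(X[chosen], y)
    return chosen, model

# Usage sketch (column names assumed):
# chosen, model = stepwise_engagement_model(df, ["teachers", "staff", "friends", "senior_peers"])
```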
Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":109337285,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109337285/thumbnails/1.jpg","file_name":"sjss_2C_08_Areerat_Siripongpan__2893-105_29.pdf_filename_UTF-8sjss_2C_08_Areerat_Siripongpan__2893-105_29.pdf","download_url":"https://www.academia.edu/attachments/109337285/download_file","bulk_download_file_name":"Factors_Contributing_to_Students_Engagem.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109337285/sjss_2C_08_Areerat_Siripongpan__2893-105_29.pdf_filename_UTF-8sjss_2C_08_Areerat_Siripongpan__2893-105_29-libre.pdf?1703132821=\u0026response-content-disposition=attachment%3B+filename%3DFactors_Contributing_to_Students_Engagem.pdf\u0026Expires=1740106380\u0026Signature=a09LAZu2XImo50V6eMUd4MauwJ766dgRlOhwcVCzZzpANATHtciuCerE6qFHWTMfqQmlazsG9XpmDnEvORIEr29R6M62GbNzGAt-ZynZmHL5zLelmJ7ZBKLp0kRBXUTTFUjQwMBMt0-yZPGh1NnCekGTvLerYqC8dS1HV53rG4I~2MB6yn11UXGX7FkrYNCjKYcSqPSGmJEK1xi79sTS7pCpJUfZreG6GK0vvO9kbejEh6YnOUqAMMIyPTNOavdpHNhyoG4sPElS1F5ehN6AwFBvCmhkcMTUi5oXKuhUMpNiWbfXG7D0QHY7xHayLsbNABshoBzFT4n2ppVuYn~KTQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111955747"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/111955747/Statistical_modelling_of_complex_topological_shapes_with_application_to_cardiovascular_imaging"><img alt="Research paper thumbnail of Statistical modelling of complex topological shapes with application to cardiovascular imaging" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/111955747/Statistical_modelling_of_complex_topological_shapes_with_application_to_cardiovascular_imaging">Statistical modelling of complex topological shapes with application to cardiovascular imaging</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The blood flow patterns in vivo are highly complex; they vary considerably from subject to subjec...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The blood flow patterns in vivo are highly complex; they vary considerably from subject to subject and even more so in patients with cardiovascular diseases. Over the last five years, there has been a rapid surge of interest in combining computational fluid dynamics (CFD) with in vivo imaging techniques for studying interactions between vessel morphology and blood flow patterns. CFD gives the ability to compute features/properties which cannot be measured, e.g. wall shear stress, mass transfer rate, but are important to studies of atherosclerosis, or the design of vessel prostheses. 
Moreover, it can also provide details of the flow which are often beyond the discrimination of the imaging techniques. This trend is driven by our increased understanding of biomechanics, the maturity of computational modelling techniques, and advancements in imaging. To this end, accurate delineation of cardiac morphology and its associated in-flow/out-flow tracts is required. Due to the complex topology of the dynamic shapes involved, this procedure usually involves labour-intensive user interaction with a large amount of 4D spatio-temporal information. With the increasing popularity of active shape and appearance models, 3D shape modelling and segmentation based on these techniques are gaining significant clinical interest. The practical quality of a statistical model relies on the definition of correspondence across a set of segmented samples. For time-varying 3D cardiovascular structures, landmark-based techniques are not only time consuming but also prone to subjective error, as temporal alignment of geometrical features is difficult. Moreover, when all of the structures, including the inflow/outflow tracts, are considered, the shape to be modelled becomes highly complex even in its static form. This makes the identification of dense correspondence within the training set a significant challenge. The purpose of this thesis is to develop a practical approach towards optimal statistical modelling and segmentation of dynamic 3D objects with complex topology. The method relies on harmonic embedding for establishing optimal global correspondence across a set of dynamic surfaces. We first demonstrate how it can be used for shapes whose topological realization is homeomorphic to a compact 2D manifold with boundary. A conformal harmonic map and tensor product B-splines are used to create a multi-resolution representation of the surfaces, which are re-parameterized using hierarchical piecewise bilinear maps in a coarse-to-fine manner. The optimal global correspondence within the training shapes is identified by an objective function based on the principle of minimum description length. The strength of the method is demonstrated by building a concise yet physiologically plausible statistical shape model of the normal human left ventricle, whose principal modes of variation correspond to intrinsic cardiac motions. The proposed framework is then extended to dynamic shapes of higher genus. Criteria based on surface conformality and minimum description length are used to simultaneously identify the intrinsic global correspondence of the training data. The strength of the method is further demonstrated by building a statistical model of the complex anatomical structure of the heart, which includes the atria, ventricles, aortic/pulmonary outflow tracts, pulmonary veins/arteries, and superior/inferior venae cavae. The analysis of variance and leave-one-out cross-validation indicate that the derived model not only captures physiologically plausible modes of variation but is also robust and concise, thus greatly enhancing its potential clinical value.
With this thesis, we also demonstrate how the derived dynamic statistical shape model can be used for 4D cardiac image segmentation and combined MR/CFD haemodynamic modelling. (Open access)
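The final statistical-modelling step this abstract builds towards, a point-distribution model computed by PCA over corresponded surfaces, can be sketched as below. The hard part described in the thesis (harmonic embedding and MDL-optimised correspondence) is assumed to have been done already; numpy usage and variable names are illustrative.

```python
# Hypothetical sketch: PCA-based statistical shape model over corresponded, aligned surfaces.
import numpy as np

def build_shape_model(shapes):
    """shapes: (n_samples, n_points * 3) array of corresponded, aligned surface vertices."""
    mean = shapes.mean(axis=0)
    centred = shapes - mean
    # PCA via SVD: rows of vt are the principal modes of shape variation
    _, s, vt = np.linalg.svd(centred, full_matrices=False)
    variances = (s ** 2) / (shapes.shape[0] - 1)     # variance captured by each mode
    return mean, vt, variances

def synthesise(mean, modes, variances, b):
    """Generate a new shape from mode weights b, expressed in standard deviations per mode."""
    k = len(b)
    return mean + (b * np.sqrt(variances[:k])) @ modes[:k]
```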
href="https://www.academia.edu/111955746/Generate_an_adaptive_de_cubing_automatic_processing_for_laminated_object_manufacturing_LOM_"><img alt="Research paper thumbnail of Generate an adaptive de-cubing automatic processing for laminated object manufacturing (LOM)" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/111955746/Generate_an_adaptive_de_cubing_automatic_processing_for_laminated_object_manufacturing_LOM_">Generate an adaptive de-cubing automatic processing for laminated object manufacturing (LOM)</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">A de-cubing process are important in the Laminated Object Manufacturing (LOM) technique, creates ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">A de-cubing process are important in the Laminated Object Manufacturing (LOM) technique, creates a physical model directly from 3D CAD model without mold and dies by using laminated material. The de-cubing process is to assign shape of waste material into generally small square shape that can be easily remove in order to reduce time and avoid prototype damages. The adaptive de-cubing process applies proportion of number of black pixels on considering area per number of total pixels on considering area. If the proportion is more than a threshold, then the considering axis is divided. On the other hade, if the proportion is less than the threshold, then the considering axis is skipped. The adaptive process algorithm has been developed base on MATLAB platform. 
The results showed that the largest threshold produced rough de-cubing and stair-stepped layer contours, while a smaller threshold produced fine de-cubing and smooth layer contours.
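A minimal sketch of the adaptive subdivision idea in this abstract follows: a region of a binary slice mask is split while its proportion of waste pixels exceeds the threshold, and otherwise kept as a single removal block. The recursion scheme, numpy usage, and the minimum block size are illustrative assumptions (the paper's own implementation was in MATLAB).

```python
# Hypothetical sketch: adaptive de-cubing of one laminated slice (True = waste pixel).
import numpy as np

def decube(mask, threshold, min_size=8):
    """Return a list of (row, col, height, width) blocks covering the waste regions."""
    blocks = []

    def split(r, c, h, w):
        region = mask[r:r + h, c:c + w]
        if region.size == 0 or not region.any():
            return                                   # nothing to remove in this region
        ratio = region.sum() / region.size           # proportion of waste pixels
        if ratio <= threshold or (h <= min_size and w <= min_size):
            blocks.append((r, c, h, w))              # keep as one removable block
            return
        h2, w2 = max(h // 2, 1), max(w // 2, 1)      # otherwise divide the region
        split(r, c, h2, w2)
        split(r, c + w2, h2, w - w2)
        split(r + h2, c, h - h2, w2)
        split(r + h2, c + w2, h - h2, w - w2)

    split(0, 0, mask.shape[0], mask.shape[1])
    return blocks
```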
href="https://www.academia.edu/111955745/Evaluation_of_the_First_Radiolabeled_99mTc_Jerusalem_Artichoke_Containing_Snack_Bar_on_Gastric_Emptying_and_Satiety_in_Healthy_Female_Volunteers"><img alt="Research paper thumbnail of Evaluation of the First Radiolabeled 99mTc-Jerusalem Artichoke-Containing Snack Bar on Gastric Emptying and Satiety in Healthy Female Volunteers" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/111955745/Evaluation_of_the_First_Radiolabeled_99mTc_Jerusalem_Artichoke_Containing_Snack_Bar_on_Gastric_Emptying_and_Satiety_in_Healthy_Female_Volunteers">Evaluation of the First Radiolabeled 99mTc-Jerusalem Artichoke-Containing Snack Bar on Gastric Emptying and Satiety in Healthy Female Volunteers</a></div><div class="wp-workCard_item"><span>Journal of the Medical Association of Thailand</span><span>, Apr 1, 2018</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111955745"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111955745"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111955745; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111955745]").text(description); $(".js-view-count[data-work-id=111955745]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111955745; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111955745']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=111955745]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111955745,"title":"Evaluation of the First Radiolabeled 99mTc-Jerusalem Artichoke-Containing Snack Bar on 
Land use and land cover classification from satellite images based on ensemble machine learning and crowdsourcing data verification
International Journal of Cartography
https://www.academia.edu/111955744/Land_use_and_land_cover_classification_from_satellite_images_based_on_ensemble_machine_learning_and_crowdsourcing_data_verification
Wavelet-based Digital Image Watermarking by using Lorenz Chaotic Signal Localization
J. Inf. Process. Syst., 2019
https://www.academia.edu/111955743/Wavelet_based_Digital_Image_Watermarking_by_using_Lorenz_Chaotic_Signal_Localization
Transmitting visual information over a broadcasting network is prone not only to copyright violation but also to forgery. Authenticating such information and protecting its authorship rights call for more advanced data encoding.
To this end, electronic watermarking is often adopted to embed an inscriptive signature in imaging data. Most existing watermarking methods, while focusing on robustness against degradation, still lack safeguards against the security loophole in which the encrypting scheme, once discovered, may be recreated by an unauthorized party. This could reveal the underlying signature, which might then be replaced or forged. This paper therefore proposes a novel digital watermarking scheme in the temporal-frequency domain. Unlike other typical wavelet-based watermarking, the proposed scheme employs the Lorenz chaotic map to specify the embedding positions, effectively making it not only formidable to decrypt but also stronger against deterministic a...
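To illustrate the general idea of chaotic localization described here, the sketch below derives embedding positions from a Lorenz trajectory (seeded by a key) and hides bits in a wavelet sub-band. PyWavelets, the Euler integration, the parity-quantisation embedding, and all constants are illustrative assumptions, not the scheme's actual parameters.

```python
# Hypothetical sketch: Lorenz-chaos-selected embedding positions in a DWT sub-band.
import numpy as np
import pywt

def lorenz_positions(n, shape, x0=(0.1, 0.0, 0.0), sigma=10.0, rho=28.0, beta=8.0 / 3.0, dt=0.01):
    """Generate n (row, col) positions inside `shape` from a Lorenz trajectory seeded by x0 (the key)."""
    x, y, z = x0
    positions = []
    for _ in range(n):
        # One explicit Euler step of the Lorenz system
        x, y, z = (x + dt * sigma * (y - x),
                   y + dt * (x * (rho - z) - y),
                   z + dt * (x * y - beta * z))
        positions.append((int(abs(x) * 1e4) % shape[0], int(abs(y) * 1e4) % shape[1]))
    return positions

def embed(image, bits, key=(0.1, 0.0, 0.0), q=8.0):
    cA, (cH, cV, cD) = pywt.dwt2(image.astype(float), "haar")
    for (r, c), bit in zip(lorenz_positions(len(bits), cH.shape, x0=key), bits):
        # Quantise the detail coefficient so its parity encodes the watermark bit
        cH[r, c] = (2 * np.round(cH[r, c] / (2 * q)) + bit) * q
    return pywt.idwt2((cA, (cH, cV, cD)), "haar")
```

Note that this toy version does not guard against repeated positions or clipping on reconstruction; it is only meant to show how a key-dependent chaotic trajectory can hide the embedding locations from anyone without the key.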
Eyewitnesses’ Visual Recollection in Suspect Identification by using Facial Appearance Model
Baghdad Science Journal, Mar 1, 2020

Facial recognition has been an active field of imaging science. With recent progress in computer vision, it is extensively applied in various areas, especially law enforcement and security. The human face is a viable biometric that can be used effectively for both identification and verification. Regardless of the facial model and metrics employed, however, its main shortcoming is that it requires a facial image against which a comparison is made; closed-circuit televisions and a facial database are therefore always needed in an operational system. Over the last few decades, unfortunately, we have seen the emergence of asymmetric warfare, where acts of terrorism are often committed in secluded areas with no cameras installed, possibly by persons whose photos have never been kept in any official database prior to the event. During subsequent investigations, the authorities thus have to rely on traumatized and frustrated witnesses, whose testimonial accounts of a suspect's appearance are dubious and often misleading. To address this issue, this paper presents an application of a statistical appearance model of the human face to assist suspect identification based on a witness's visual recollection. An online prototype system was implemented to demonstrate its core functionalities. Both the visual and numerical assessments reported herein indicate the potential benefits of the system for the intended purpose.

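A statistical appearance model of this kind is commonly built by PCA over registered, vectorized face images, and a candidate face can then be synthesized from a few mode weights that a witness adjusts. The sketch below shows that generic construction only, assuming scikit-learn and NumPy; the function names, training data, and number of modes are hypothetical and not taken from the paper.

# Minimal PCA-based appearance model sketch (illustrative; not the paper's pipeline).
import numpy as np
from sklearn.decomposition import PCA

def build_appearance_model(training_faces, n_modes=20):
    """training_faces: (n_samples, n_pixels) array of registered, vectorized faces."""
    model = PCA(n_components=n_modes)
    model.fit(training_faces)
    return model

def synthesize_face(model, weights, shape):
    """Reconstruct a face image from per-mode weights (e.g. adjusted by a witness)."""
    weights = np.asarray(weights, dtype=float)
    face = model.mean_ + weights @ model.components_[: len(weights)]
    return face.reshape(shape)
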
Fuzzy logic rate adjustment controls using a circuit breaker for persistent congestion in wireless sensor networks
Wireless Networks, Mar 4, 2020

Congestion control is necessary for enhancing quality of service in wireless sensor networks (WSNs). With advances in sensing technology, the substantial amount of data traversing a WSN can easily cause congestion, especially given limited resources. As a consequence, network throughput decreases due to significant packet loss and increased delays. Moreover, congestion not only adversely affects data traffic and the transmission success rate but also dissipates energy excessively, which in turn shortens sensor node and, hence, network lifespans. Typical congestion control strategies were designed to address congestion caused by transient events. On many occasions, however, congestion is caused by repeated anomalies and consequently persists for an extended period. This paper thus proposes a congestion control strategy that can eliminate both types of congestion. The study adopted a fuzzy logic algorithm to resolve congestion in three key areas: optimal path selection, traffic rate adjustment incorporating a momentum indicator, and an optimal timeout setting for a circuit breaker that limits persistent congestion. With fuzzy logic, decisions can be made efficiently based on probabilistic weights derived from fitness functions of congestion-relevant parameters. The simulation and experimental results reported herein demonstrate that the proposed strategy outperforms state-of-the-art strategies in terms of traffic rate, transmission delay, queue utilization, and energy efficiency.

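To make the fuzzy rate-adjustment idea concrete, here is a toy sketch: triangular membership functions over queue occupancy are combined with a clipped momentum term into a multiplicative rate factor. The membership breakpoints, rule weights, and the circuit-breaker comment are assumptions for illustration, not the controller described in the paper.

# Toy fuzzy rate-adjustment sketch (breakpoints and rules are assumptions).
def tri(x, a, b, c):
    """Triangular membership function peaking at b over the support [a, c]."""
    if x <= a or x >= c:
        return 0.0
    return (x - a) / (b - a) if x <= b else (c - x) / (c - b)

def rate_scale(queue_util, momentum):
    """queue_util in [0, 1]; momentum is its change per control interval.
    Returns a multiplicative factor applied to the node's current sending rate."""
    low = tri(queue_util, -0.1, 0.0, 0.5)
    med = tri(queue_util, 0.2, 0.5, 0.8)
    high = tri(queue_util, 0.5, 1.0, 1.1)
    rising = min(max(momentum, 0.0) * 5.0, 1.0)   # momentum indicator, clipped to [0, 1]
    # Rule base: speed up when the queue is low, hold when medium, back off when
    # high, and back off harder while the queue is still growing.
    num = low * 1.2 + med * 1.0 + high * (0.6 - 0.3 * rising)
    den = low + med + high
    return num / den if den > 0 else 1.0

A circuit breaker in this spirit could suspend transmission entirely when the factor stays below some threshold for longer than a chosen timeout, which is the persistent-congestion case the paper targets.
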
A Geospatial Platform for Crowdsourcing Green Space Area Management Using GIS and Deep Learning Classification
ISPRS International Journal of Geo-Information, Mar 20, 2022

Statistical Shape Modelling of the Levator Ani with Thickness Variation
Lecture Notes in Computer Science, 2004

The levator ani is vulnerable to injury during childbirth, and effective surgical intervention requires full knowledge of the morphology and mechanical properties of the muscle structure. The shape of the levator ani and its regional thickening under different levels of physiological loading can indicate pelvic floor dysfunction. This paper describes a coupled approach to statistical shape and thickness modelling, based on harmonic embedding, for volumetric analysis of the levator ani. With harmonic embedding, the statistical model captures shape and thickness variation of the levator ani across different subjects and during varying levels of stress. The study demonstrates that the derived model is compact and physiologically meaningful, showing the practical value of the technique.

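One generic way to couple shape and thickness in a single statistical model is to standardize and concatenate the two feature blocks before extracting principal modes, so that each mode captures correlated variation in both. The sketch below shows only that generic construction with NumPy; the data layout and scaling choices are assumptions, and the harmonic-embedding correspondence step from the paper is not reproduced.

# Sketch of a coupled shape/thickness statistical model (generic construction).
import numpy as np

def coupled_model(shapes, thicknesses, n_modes=5):
    """shapes: (n_subjects, 3 * n_vertices); thicknesses: (n_subjects, n_vertices)."""
    X = np.hstack([shapes, thicknesses]).astype(float)
    mean = X.mean(axis=0)
    Xc = X - mean
    # Per-block scaling keeps millimetre-scale thickness from being swamped by coordinates.
    scale = np.ones(X.shape[1])
    scale[: shapes.shape[1]] = shapes.std()
    scale[shapes.shape[1]:] = thicknesses.std()
    Xc /= scale
    # Principal modes via SVD of the centred, scaled data matrix.
    _, s, vt = np.linalg.svd(Xc, full_matrices=False)
    return mean, scale, vt[:n_modes], s[:n_modes] / np.sqrt(len(X) - 1)
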
Secure and Robust Image Watermarking using Discrete Wavelet and Arnold Transforms
2022 37th International Technical Conference on Circuits/Systems, Computers and Communications (ITC-CSCC)

Prevalence of depression and stress among the first year students in Suranaree University of Technology, Thailand
Health Psychology Research

The objectives of this study were to evaluate the levels of depression and stress among first-year students at Suranaree University of Technology (SUT) and to compare those levels across demographic factors, including gender, domicile, and reported problems. The research was approved by SUT's Research Ethics Committee, and the study period was July to August 2018. An online self-report questionnaire was used to collect data from the sample of SUT first-year students. The total number of first-year students at SUT was 3,552, and the response rate was 65.15%. The major findings revealed that 7.0% and 51.1% of respondents were suffering from depression and pathological stress, respectively. In addition, the prevalence of depression and pathological stress was higher among female respondents than in other gender groups. The findings would suggest that related activities should be organized to promote students' awarenes...

Forecast Coral Bleaching by Machine Learnings of Remotely Sensed Geospatial Data
International Journal of Design & Nature and Ecodynamics

With the rapid changes in the Earth's climate, coral bleaching has been spreading worldwide and becoming much more severe. It is considered an imminent threat to marine animals and causes adverse impacts on fisheries and tourism. Environmental agencies in affected regions have been made aware of the problem and have begun efforts to contain coral bleaching. Thus far, they often rely on conventional site surveys to determine suitable sites at which to intervene and commence the coral reef revival process. With recent advances in remote sensing technology, sea surface temperature (SST), acquired by satellites, has become a viable proxy for coral bleaching. Predicting coral bleaching based solely on SST is limited, however, as it is only one of many determinants. In addition, areas with different SST levels exhibit different bleaching characteristics, so area-specific models are important for appropriately monitoring these events. Thus far, forecasting bleaching based on SST alone has limited acc...

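An area-specific forecaster of this kind is typically a supervised classifier over several remotely sensed predictors rather than SST alone. The sketch below assumes scikit-learn; the feature list in the comment and the model choice are placeholders, since the abstract only confirms that SST is one determinant among several.

# Illustrative per-area bleaching model; features and labels here are placeholders.
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

# X: rows of remotely sensed predictors for one reef area, e.g.
#    [mean SST, SST anomaly, chlorophyll-a, solar radiation, ...] (hypothetical).
# y: 1 if a bleaching event was observed in the following period, else 0.
def fit_area_model(X, y):
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
    model = RandomForestClassifier(n_estimators=200, random_state=0)
    model.fit(X_train, y_train)
    print("held-out accuracy:", model.score(X_test, y_test))
    return model
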
Multimodal Fusion of Deeply Inferred Point Clouds for 3D Scene Reconstruction Using Cross-Entropy ICP
IEEE Access

Depth estimation is a crucial step toward 3D scene understanding. Most traditional systems rely on direct sensing of this information by means of photogrammetry or stereo imaging. As scenes become more complex, these modalities are impeded by, for instance, occlusion and imperfect lighting conditions. As a consequence, reconstructed surfaces are normally left with voids due to missing data, so surface regularization is often required as post-processing. With recent advances in deep learning, depth inference from a monocular image has attracted considerable interest, and many convolutional architectures have been proposed for it with promising results. Thus far, however, the visual cues learned and generalized by these networks may be ambiguous, resulting in inaccurate estimation. To address these issues, this paper presents an effective method for fusing point clouds of the same scene extracted from depth values that are directly measured by an infrared camera and estimated by a modified ResNet-50 from an RGB image. To ensure robustness and efficiency in finding correspondences between these point clouds and aligning them, an information-theoretic alignment strategy called CEICP is proposed. The experimental results on a public dataset demonstrate that the proposed method outperforms its counterparts while producing good-quality surface renditions of the underlying scene.

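For context, a standard point-to-point ICP iteration is sketched below: nearest-neighbour correspondences from a KD-tree, then a rigid update from the SVD of the cross-covariance (Kabsch). This is the baseline that CEICP builds on; the cross-entropy correspondence strategy itself is not described in the abstract and is not reproduced here. The sketch assumes NumPy and SciPy.

# Standard point-to-point ICP step for context (not the CEICP formulation).
import numpy as np
from scipy.spatial import cKDTree

def icp(source, target, iters=30):
    """Align source (N, 3) to target (M, 3); returns the transformed source points."""
    src = source.copy()
    tree = cKDTree(target)
    for _ in range(iters):
        _, idx = tree.query(src)                # nearest-neighbour correspondences
        tgt = target[idx]
        mu_s, mu_t = src.mean(axis=0), tgt.mean(axis=0)
        H = (src - mu_s).T @ (tgt - mu_t)       # cross-covariance of centred sets
        U, _, Vt = np.linalg.svd(H)
        R = Vt.T @ U.T                          # optimal rotation (Kabsch)
        if np.linalg.det(R) < 0:                # guard against reflections
            Vt[-1] *= -1
            R = Vt.T @ U.T
        t = mu_t - R @ mu_s
        src = src @ R.T + t
    return src
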
(true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "b8076d47800822491b50c85845b48dd8" } } $('.js-work-strip[data-work-id=86305215]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":86305215,"title":"Multimodal Fusion of Deeply Inferred Point Clouds for 3D Scene Reconstruction Using Cross-Entropy ICP","internal_url":"https://www.academia.edu/86305215/Multimodal_Fusion_of_Deeply_Inferred_Point_Clouds_for_3D_Scene_Reconstruction_Using_Cross_Entropy_ICP","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":90786958,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/90786958/thumbnails/1.jpg","file_name":"09834942.pdf","download_url":"https://www.academia.edu/attachments/90786958/download_file","bulk_download_file_name":"Multimodal_Fusion_of_Deeply_Inferred_Poi.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/90786958/09834942-libre.pdf?1662651866=\u0026response-content-disposition=attachment%3B+filename%3DMultimodal_Fusion_of_Deeply_Inferred_Poi.pdf\u0026Expires=1740157310\u0026Signature=MUUex-RSgFxmMFw-BbiJfbBOPwf5fyDbvNOZnOvxiif-GyLENy0q6EHEGIijKUFralOOS5NAPHffmrtANeapvotKS1G0RzTVSqhEA~6hz3hPj~TyFVxZDRX3N0vToIRRalpvPyeAsIvRN-xeJoDGFUHZzwjd9U5AOvBdGJ5DsuOoh4QzJJM2XDNY32hxwAXDyhpNY1RCOcA2Mj5O5S-eQbWMa1VvqnCl5Mqo60sJNsUhpNajwdhI6p9JJeknj8~rit5EE92sMWwI8QmtGmA00wxSgD5h-28Dr9gEu-qCv9VeBAQH8ETtkC5n39aZ1XJ3x0KNzzRgXFnCiPpMMdVsRA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="86305168"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/86305168/JavaScript_3D_graphics_library_for_agricultural_geographic_information_system"><img alt="Research paper thumbnail of JavaScript 3D graphics library for agricultural geographic information system" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/86305168/JavaScript_3D_graphics_library_for_agricultural_geographic_information_system">JavaScript 3D graphics library for agricultural geographic information system</a></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="86305168"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item 
Urban areas extraction from multi sensor data based on machine learning and data fusion
class="wp-workCard_item"><span>Pattern Recognition and Image Analysis</span><span>, 2017</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Accurate urban areas information is important for a variety of applications, especially city plan...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Accurate urban areas information is important for a variety of applications, especially city planning and natural disaster prediction and management. In recent years, extraction of urban structures from remotely sensed images has been extensively explored. The key advantages of this imaging modality are reduction of surveying expense and time. It also elevates restrictions on ground surveys. Thus far, much research typically extracts these structures from very high resolution satellite imagery, which are unfortunately of relatively poor spectral resolution, resulting in good precision yet moderate accuracy. Therefore, this paper investigates extraction of buildings from middle and high resolution satellite images by using spectral indices</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="e6589146b20ec34ed1a73dbfe254d87b" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:88622171,&quot;asset_id&quot;:83191106,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/88622171/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="83191106"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="83191106"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 83191106; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=83191106]").text(description); $(".js-view-count[data-work-id=83191106]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 83191106; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='83191106']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "e6589146b20ec34ed1a73dbfe254d87b" } } $('.js-work-strip[data-work-id=83191106]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":83191106,"title":"Urban areas extraction from multi sensor data based on machine learning and data fusion","internal_url":"https://www.academia.edu/83191106/Urban_areas_extraction_from_multi_sensor_data_based_on_machine_learning_and_data_fusion","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":88622171,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/88622171/thumbnails/1.jpg","file_name":"S105466181604013120220715-1-1m1fot1.pdf","download_url":"https://www.academia.edu/attachments/88622171/download_file","bulk_download_file_name":"Urban_areas_extraction_from_multi_sensor.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/88622171/S105466181604013120220715-1-1m1fot1-libre.pdf?1657889263=\u0026response-content-disposition=attachment%3B+filename%3DUrban_areas_extraction_from_multi_sensor.pdf\u0026Expires=1740157310\u0026Signature=RN46vYN2HysMNHSymXIeTFyTQICa41V4f7QSgdFFpHzL6MS0qdT2mFR0V7ZwVigZkky2cBZYhevQi9dJrHrOrXR7XuATptzC~1UqayeDxa2fy-ARA~iWc5UySTL15~t2037TpeMunsG7yal5sxhcM0-3RkKqtbaBvrOU4t-dOvOrpVwzHHlKmDqtPOCo~KPIhxEw3-RuhnXPANH4JUhV6cW6Vvjjw44BIKiiCiiKr4Kz4pc4Te3u31P1FZqhGUtd1cRoEbGJIMwFLH7LztSSli1kWPwRIae~UzH7XKTxxa5HSItLsQZgn-JY5kLdK0YXrqTvMVsAtGFJXKiE45KvJQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="78215533"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/78215533/DWT_MFCC_Feature_Extraction_for_Tile_Tapping_Sound_Classification"><img alt="Research paper thumbnail of DWT/ MFCC Feature Extraction for Tile Tapping Sound Classification" class="work-thumbnail" src="https://attachments.academia-assets.com/85340237/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/78215533/DWT_MFCC_Feature_Extraction_for_Tile_Tapping_Sound_Classification">DWT/ MFCC Feature Extraction for Tile Tapping Sound Classification</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Tile tapping sound inspection is a process of construction quality control. 
Tile tapping sound inspection is a process of construction quality control. A hollow sound, for instance, indicates low-quality tessellation and voids underneath that could lead to broken tiles in the future. Hollow-sounding inspection has usually been carried out by construction specialists, whose skills and judgment may vary across individuals. This paper alleviates this issue and presents a Deep Learning (DL) classification method for computerized sounding tile inspection. Unlike other existing works in the area, where structural details were assessed, this study acquired tapping sound signals and analyzed them in the spectral domain by using the Discrete Wavelet Transform (DWT) and Mel-frequency Cepstral Coefficients (MFCC). Dull versus hollow-sounding tiles were then classified based on these features by means of a Convolutional Neural Network (CNN). Experiments carried out on a laboratory tessellation indicated that the proposed method could differentiate dull from hollow-sounding tiles wit...
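As a rough sketch of the two feature extractors named in the abstract, the Python snippet below computes MFCC and DWT descriptors for a recorded tap using librosa and PyWavelets; the sampling rate, wavelet family, decomposition level, and pooling are illustrative choices, not the paper's settings.

import numpy as np
import librosa
import pywt

def tapping_features(wav_path, sr=22050, n_mfcc=13, wavelet="db4", level=4):
    # Load the tapping sound as a mono signal.
    y, sr = librosa.load(wav_path, sr=sr, mono=True)

    # MFCCs, summarised by their mean over time into a fixed-length vector.
    mfcc = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=n_mfcc)
    mfcc_vec = mfcc.mean(axis=1)

    # DWT sub-band energies as a compact time-frequency descriptor.
    coeffs = pywt.wavedec(y, wavelet, level=level)
    dwt_vec = np.array([float(np.sum(c ** 2)) for c in coeffs])

    # The concatenated vector (or the 2-D MFCC map itself) could be fed to a CNN
    # that separates dull from hollow-sounding tiles.
    return np.concatenate([mfcc_vec, dwt_vec])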
})(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "80336f8904150c957c680a299770dd48" } } $('.js-work-strip[data-work-id=78215533]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":78215533,"title":"DWT/ MFCC Feature Extraction for Tile Tapping Sound Classification","internal_url":"https://www.academia.edu/78215533/DWT_MFCC_Feature_Extraction_for_Tile_Tapping_Sound_Classification","owner_id":64961290,"coauthors_can_edit":true,"owner":{"id":64961290,"first_name":"Paramate","middle_initials":null,"last_name":"Horkaew","page_name":"ParamateHorkaew","domain_name":"biotech","created_at":"2017-05-30T19:36:02.812-07:00","display_name":"Paramate Horkaew","url":"https://biotech.academia.edu/ParamateHorkaew"},"attachments":[{"id":85340237,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/85340237/thumbnails/1.jpg","file_name":"480424909.pdf","download_url":"https://www.academia.edu/attachments/85340237/download_file","bulk_download_file_name":"DWT_MFCC_Feature_Extraction_for_Tile_Tap.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/85340237/480424909-libre.pdf?1651495728=\u0026response-content-disposition=attachment%3B+filename%3DDWT_MFCC_Feature_Extraction_for_Tile_Tap.pdf\u0026Expires=1740157310\u0026Signature=fPZ95x0BtrAcv8-tBinJh2Os7Op0Gg8ras8KXsW8HUp-GteIQTTw~B~fDR8~DsU184~HJZ8xyX6IJDSoCx1ZInJKUumxXpO4g-Zays1ftL-vutdHjWcmeNVKGVL5~6qdYd-3VgledACBkDc9gGjeZr0WWhWkhn7pmsl6GUiyMGchomIGHGn0FeamuIFRPw5wFcyE33hkp6f0Vf4iv1r3D43vZ4bRRmfWUXYkRosFqCJleOv2m~fWNvCLn9Kjwzsrtp6kfsQEoTKAJngKhS9lwq4NM0i8jT2ekQo8rf2PTHLWeTpIHe8QwSCr7MC4G4VxQ~GnfBh8tW-YoepitsZDXA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/google_contacts-0dfb882d836b94dbcb4a2d123d6933fc9533eda5be911641f20b4eb428429600.js"], function() { // from javascript_helper.rb $('.js-google-connect-button').click(function(e) { e.preventDefault(); GoogleContacts.authorize_and_show_contacts(); Aedu.Dismissibles.recordClickthrough("WowProfileImportContactsPrompt"); }); $('.js-update-biography-button').click(function(e) { e.preventDefault(); Aedu.Dismissibles.recordClickthrough("UpdateUserBiographyPrompt"); $.ajax({ url: $r.api_v0_profiles_update_about_path({ subdomain_param: 'api', about: "", }), type: 'PUT', success: function(response) { location.reload(); } }); }); $('.js-work-creator-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_document_path({ source: encodeURIComponent(""), }); }); $('.js-video-upload-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_video_path({ source: encodeURIComponent(""), }); }); $('.js-do-this-later-button').click(function() { 
$(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("WowProfileImportContactsPrompt"); }); $('.js-update-biography-do-this-later-button').click(function(){ $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("UpdateUserBiographyPrompt"); }); $('.wow-profile-mentions-upsell--close').click(function(){ $('.wow-profile-mentions-upsell--panel').hide(); Aedu.Dismissibles.recordDismissal("WowProfileMentionsUpsell"); }); $('.wow-profile-mentions-upsell--button').click(function(){ Aedu.Dismissibles.recordClickthrough("WowProfileMentionsUpsell"); }); new WowProfile.SocialRedesignUserWorks({ initialWorksOffset: 20, allWorksOffset: 20, maxSections: 1 }) }); </script> </div></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile_edit-5ea339ee107c863779f560dd7275595239fed73f1a13d279d2b599a28c0ecd33.js","https://a.academia-assets.com/assets/add_coauthor-22174b608f9cb871d03443cafa7feac496fb50d7df2d66a53f5ee3c04ba67f53.js","https://a.academia-assets.com/assets/tab-dcac0130902f0cc2d8cb403714dd47454f11fc6fb0e99ae6a0827b06613abc20.js","https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js"], function() { // from javascript_helper.rb window.ae = window.ae || {}; window.ae.WowProfile = window.ae.WowProfile || {}; if(Aedu.User.current && Aedu.User.current.id === $viewedUser.id) { window.ae.WowProfile.current_user_edit = {}; new WowProfileEdit.EditUploadView({ el: '.js-edit-upload-button-wrapper', model: window.$current_user, }); new AddCoauthor.AddCoauthorsController(); } var userInfoView = new WowProfile.SocialRedesignUserInfo({ recaptcha_key: "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB" }); WowProfile.router = new WowProfile.Router({ userInfoView: userInfoView }); Backbone.history.start({ pushState: true, root: "/" + $viewedUser.page_name }); new WowProfile.UserWorksNav() }); </script> </div> <div class="bootstrap login"><div class="modal fade login-modal" id="login-modal"><div class="login-modal-dialog modal-dialog"><div class="modal-content"><div class="modal-header"><button class="close close" data-dismiss="modal" type="button"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button><h4 class="modal-title text-center"><strong>Log In</strong></h4></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><button class="btn btn-fb btn-lg btn-block btn-v-center-content" id="login-facebook-oauth-button"><svg style="float: left; width: 19px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="facebook-square" class="svg-inline--fa fa-facebook-square fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M400 32H48A48 48 0 0 0 0 80v352a48 48 0 0 0 48 48h137.25V327.69h-63V256h63v-54.64c0-62.15 37-96.48 93.67-96.48 27.14 0 55.52 4.84 55.52 4.84v61h-31.27c-30.81 0-40.42 19.12-40.42 38.73V256h68.78l-11 71.69h-57.78V480H400a48 48 0 0 0 48-48V80a48 48 0 0 0-48-48z"></path></svg><small><strong>Log in</strong> with <strong>Facebook</strong></small></button><br /><button class="btn btn-google btn-lg btn-block btn-v-center-content" id="login-google-oauth-button"><svg style="float: left; width: 22px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="google-plus" class="svg-inline--fa fa-google-plus fa-w-16" role="img" 
xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M256,8C119.1,8,8,119.1,8,256S119.1,504,256,504,504,392.9,504,256,392.9,8,256,8ZM185.3,380a124,124,0,0,1,0-248c31.3,0,60.1,11,83,32.3l-33.6,32.6c-13.2-12.9-31.3-19.1-49.4-19.1-42.9,0-77.2,35.5-77.2,78.1S142.3,334,185.3,334c32.6,0,64.9-19.1,70.1-53.3H185.3V238.1H302.2a109.2,109.2,0,0,1,1.9,20.7c0,70.8-47.5,121.2-118.8,121.2ZM415.5,273.8v35.5H380V273.8H344.5V238.3H380V202.8h35.5v35.5h35.2v35.5Z"></path></svg><small><strong>Log in</strong> with <strong>Google</strong></small></button><br /><style type="text/css">.sign-in-with-apple-button { width: 100%; height: 52px; border-radius: 3px; border: 1px solid black; cursor: pointer; } .sign-in-with-apple-button > div { margin: 0 auto; / This centers the Apple-rendered button horizontally }</style><script src="https://appleid.cdn-apple.com/appleauth/static/jsapi/appleid/1/en_US/appleid.auth.js" type="text/javascript"></script><div class="sign-in-with-apple-button" data-border="false" data-color="white" id="appleid-signin"><span &nbsp;&nbsp;="Sign Up with Apple" class="u-fs11"></span></div><script>AppleID.auth.init({ clientId: 'edu.academia.applesignon', scope: 'name email', redirectURI: 'https://www.academia.edu/sessions', state: "92872d999780286f34de15c740f2ab1dce5ee9bfa9f24a4b257ca1f8f2c099d8", });</script><script>// Hacky way of checking if on fast loswp if (window.loswp == null) { (function() { const Google = window?.Aedu?.Auth?.OauthButton?.Login?.Google; const Facebook = window?.Aedu?.Auth?.OauthButton?.Login?.Facebook; if (Google) { new Google({ el: '#login-google-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } if (Facebook) { new Facebook({ el: '#login-facebook-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } })(); }</script></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><div class="hr-heading login-hr-heading"><span class="hr-heading-text">or</span></div></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><form class="js-login-form" action="https://www.academia.edu/sessions" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="iIXjAVSQnbqfuT35gcWxZW5Qd_6dKEHjyDHekDBV5yXrHRhIRMOz6FKdlX3eM1s9slR1FM6HtmMBgq8iTsR-MQ" autocomplete="off" /><div class="form-group"><label class="control-label" for="login-modal-email-input" style="font-size: 14px;">Email</label><input class="form-control" id="login-modal-email-input" name="login" type="email" /></div><div class="form-group"><label class="control-label" for="login-modal-password-input" style="font-size: 14px;">Password</label><input class="form-control" id="login-modal-password-input" name="password" type="password" /></div><input type="hidden" name="post_login_redirect_url" id="post_login_redirect_url" value="https://biotech.academia.edu/ParamateHorkaew" autocomplete="off" /><div class="checkbox"><label><input type="checkbox" name="remember_me" id="remember_me" value="1" checked="checked" /><small style="font-size: 12px; margin-top: 2px; display: inline-block;">Remember me on this computer</small></label></div><br><input type="submit" name="commit" value="Log In" class="btn btn-primary btn-block btn-lg js-login-submit" data-disable-with="Log In" /></br></form><script>typeof window?.Aedu?.recaptchaManagedForm === 'function' && window.Aedu.recaptchaManagedForm( document.querySelector('.js-login-form'), 
document.querySelector('.js-login-submit') );</script><small style="font-size: 12px;"><br />or <a data-target="#login-modal-reset-password-container" data-toggle="collapse" href="javascript:void(0)">reset password</a></small><div class="collapse" id="login-modal-reset-password-container"><br /><div class="well margin-0x"><form class="js-password-reset-form" action="https://www.academia.edu/reset_password" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="VMVp21ObAKaxdI8Iphm_6Y6wsmUKnjyiPua6smYC9p83XZKSQ8gu9HxQJ4z571WxUrSwj1kxyyL3VcsAGJNviw" autocomplete="off" /><p>Enter the email address you signed up with and we&#39;ll email you a reset link.</p><div class="form-group"><input class="form-control" name="email" type="email" /></div><script src="https://recaptcha.net/recaptcha/api.js" async defer></script> <script> var invisibleRecaptchaSubmit = function () { var closestForm = function (ele) { var curEle = ele.parentNode; while (curEle.nodeName !== 'FORM' && curEle.nodeName !== 'BODY'){ curEle = curEle.parentNode; } return curEle.nodeName === 'FORM' ? curEle : null }; var eles = document.getElementsByClassName('g-recaptcha'); if (eles.length > 0) { var form = closestForm(eles[0]); if (form) { form.submit(); } } }; </script> <input type="submit" data-sitekey="6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj" data-callback="invisibleRecaptchaSubmit" class="g-recaptcha btn btn-primary btn-block" value="Email me a link" value=""/> </form></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/collapse-45805421cf446ca5adf7aaa1935b08a3a8d1d9a6cc5d91a62a2a3a00b20b3e6a.js"], function() { // from javascript_helper.rb $("#login-modal-reset-password-container").on("shown.bs.collapse", function() { $(this).find("input[type=email]").focus(); }); }); </script> </div></div></div><div class="modal-footer"><div class="text-center"><small style="font-size: 12px;">Need an account?&nbsp;<a rel="nofollow" href="https://www.academia.edu/signup">Click here to sign up</a></small></div></div></div></div></div></div><script>// If we are on subdomain or non-bootstrapped page, redirect to login page instead of showing modal (function(){ if (typeof $ === 'undefined') return; var host = window.location.hostname; if ((host === $domain || host === "www."+$domain) && (typeof $().modal === 'function')) { $("#nav_log_in").click(function(e) { // Don't follow the link and open the modal e.preventDefault(); $("#login-modal").on('shown.bs.modal', function() { $(this).find("#login-modal-email-input").focus() }).modal('show'); }); } })()</script> <div class="bootstrap" id="footer"><div class="footer-content clearfix text-center padding-top-7x" style="width:100%;"><ul class="footer-links-secondary footer-links-wide list-inline margin-bottom-1x"><li><a href="https://www.academia.edu/about">About</a></li><li><a href="https://www.academia.edu/press">Press</a></li><li><a href="https://www.academia.edu/documents">Papers</a></li><li><a href="https://www.academia.edu/topics">Topics</a></li><li><a href="https://www.academia.edu/journals">Academia.edu Journals</a></li><li><a rel="nofollow" href="https://www.academia.edu/hiring"><svg style="width: 13px; height: 13px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="briefcase" class="svg-inline--fa fa-briefcase fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M320 336c0 8.84-7.16 16-16 16h-96c-8.84 0-16-7.16-16-16v-48H0v144c0 25.6 22.4 48 48 
48h416c25.6 0 48-22.4 48-48V288H320v48zm144-208h-80V80c0-25.6-22.4-48-48-48H176c-25.6 0-48 22.4-48 48v48H48c-25.6 0-48 22.4-48 48v80h512v-80c0-25.6-22.4-48-48-48zm-144 0H192V96h128v32z"></path></svg>&nbsp;<strong>We're Hiring!</strong></a></li><li><a rel="nofollow" href="https://support.academia.edu/hc/en-us"><svg style="width: 12px; height: 12px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="question-circle" class="svg-inline--fa fa-question-circle fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M504 256c0 136.997-111.043 248-248 248S8 392.997 8 256C8 119.083 119.043 8 256 8s248 111.083 248 248zM262.655 90c-54.497 0-89.255 22.957-116.549 63.758-3.536 5.286-2.353 12.415 2.715 16.258l34.699 26.31c5.205 3.947 12.621 3.008 16.665-2.122 17.864-22.658 30.113-35.797 57.303-35.797 20.429 0 45.698 13.148 45.698 32.958 0 14.976-12.363 22.667-32.534 33.976C247.128 238.528 216 254.941 216 296v4c0 6.627 5.373 12 12 12h56c6.627 0 12-5.373 12-12v-1.333c0-28.462 83.186-29.647 83.186-106.667 0-58.002-60.165-102-116.531-102zM256 338c-25.365 0-46 20.635-46 46 0 25.364 20.635 46 46 46s46-20.636 46-46c0-25.365-20.635-46-46-46z"></path></svg>&nbsp;<strong>Help Center</strong></a></li></ul><ul class="footer-links-tertiary list-inline margin-bottom-1x"><li class="small">Find new research papers in:</li><li class="small"><a href="https://www.academia.edu/Documents/in/Physics">Physics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Chemistry">Chemistry</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Biology">Biology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Health_Sciences">Health Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Ecology">Ecology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Earth_Sciences">Earth Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></li></ul></div></div><div class="DesignSystem" id="credit" style="width:100%;"><ul class="u-pl0x footer-links-legal list-inline"><li><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li>Academia &copy;2025</li></ul></div><script> //<![CDATA[ window.detect_gmtoffset = true; window.Academia && window.Academia.set_gmtoffset && Academia.set_gmtoffset('/gmtoffset'); //]]> </script> <div id='overlay_background'></div> <div id='bootstrap-modal-container' class='bootstrap'></div> <div id='ds-modal-container' class='bootstrap DesignSystem'></div> <div id='full-screen-modal'></div> </div> </body> </html>
