Fakhreddine Ababsa | Ecole Nationale Supérieure arts et metiers - Academia.edu

rel="nofollow" href="https://www.academia.edu/press">Press</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://medium.com/@academia">Blog</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/documents">Papers</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/hiring"><i class="fa fa-briefcase"></i>&nbsp;We're Hiring!</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://support.academia.edu/"><i class="fa fa-question-circle"></i>&nbsp;Help Center</a></li><li class="js-mobile-nav-collapse-trigger u-borderColorGrayLight u-borderBottom1 dropup" style="display:none"><a href="#">less&nbsp<span class="caret"></span></a></li></ul></li></ul></div></div></div><script>(function(){ var $moreLink = $(".js-mobile-nav-expand-trigger"); var $lessLink = $(".js-mobile-nav-collapse-trigger"); var $section = $('.js-mobile-nav-expand-section'); $moreLink.click(function(ev){ ev.preventDefault(); $moreLink.hide(); $lessLink.show(); $section.collapse('show'); }); $lessLink.click(function(ev){ ev.preventDefault(); $moreLink.show(); $lessLink.hide(); $section.collapse('hide'); }); })() if ($a.is_logged_in() || false) { new Aedu.NavigationController({ el: '.js-main-nav', showHighlightedNotification: false }); } else { $(".js-header-login-url").attr("href", $a.loginUrlWithRedirect()); } Aedu.autocompleteSearch = new AutocompleteSearch({el: '.js-SiteSearch-form'});</script></div></div> <div id='site' class='fixed'> <div id="content" class="clearfix"> <script>document.addEventListener('DOMContentLoaded', function(){ var $dismissible = $(".dismissible_banner"); $dismissible.click(function(ev) { $dismissible.hide(); }); });</script> <script src="//a.academia-assets.com/assets/webpack_bundles/profile.wjs-bundle-d50db999ccdab527b1e040b4cc8af62d4a8254a0385f5004e234635ba055442a.js" defer="defer"></script><script>Aedu.rankings = { showPaperRankingsLink: false } $viewedUser = Aedu.User.set_viewed( {"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa","photo":"https://0.academia-photos.com/263837202/115339392/104625007/s65_fakhreddine.ababsa.png","has_photo":true,"department":{"id":1377998,"name":"Paris","url":"https://gadz.academia.edu/Departments/Paris/Documents","university":{"id":105973,"name":"Ecole Nationale Supérieure arts et metiers","url":"https://gadz.academia.edu/"}},"position":"Faculty Member","position_id":1,"is_analytics_public":false,"interests":[{"id":1755,"name":"Eye tracking","url":"https://www.academia.edu/Documents/in/Eye_tracking"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":91839,"name":"Paris","url":"https://www.academia.edu/Documents/in/Paris"}]} ); if ($a.is_logged_in() && $viewedUser.is_current_user()) { $('body').addClass('profile-viewed-by-owner'); } $socialProfiles = 
[]</script><div id="js-react-on-rails-context" style="display:none" data-rails-context="{&quot;inMailer&quot;:false,&quot;i18nLocale&quot;:&quot;en&quot;,&quot;i18nDefaultLocale&quot;:&quot;en&quot;,&quot;href&quot;:&quot;https://gadz.academia.edu/FakhreddineAbabsa&quot;,&quot;location&quot;:&quot;/FakhreddineAbabsa&quot;,&quot;scheme&quot;:&quot;https&quot;,&quot;host&quot;:&quot;gadz.academia.edu&quot;,&quot;port&quot;:null,&quot;pathname&quot;:&quot;/FakhreddineAbabsa&quot;,&quot;search&quot;:null,&quot;httpAcceptLanguage&quot;:null,&quot;serverSide&quot;:false}"></div> <div class="js-react-on-rails-component" style="display:none" data-component-name="ProfileCheckPaperUpdate" data-props="{}" data-trace="false" data-dom-id="ProfileCheckPaperUpdate-react-component-15d84aff-d75f-4d6a-b1bc-ed3912bc2dc6"></div> <div id="ProfileCheckPaperUpdate-react-component-15d84aff-d75f-4d6a-b1bc-ed3912bc2dc6"></div> <div class="DesignSystem"><div class="onsite-ping" id="onsite-ping"></div></div><div class="profile-user-info DesignSystem"><div class="social-profile-container"><div class="left-panel-container"><div class="user-info-component-wrapper"><div class="user-summary-cta-container"><div class="user-summary-container"><div class="social-profile-avatar-container"><img class="profile-avatar u-positionAbsolute" alt="Fakhreddine Ababsa" border="0" onerror="if (this.src != &#39;//a.academia-assets.com/images/s200_no_pic.png&#39;) this.src = &#39;//a.academia-assets.com/images/s200_no_pic.png&#39;;" width="200" height="200" src="https://0.academia-photos.com/263837202/115339392/104625007/s200_fakhreddine.ababsa.png" /></div><div class="title-container"><h1 class="ds2-5-heading-sans-serif-sm">Fakhreddine Ababsa</h1><div class="affiliations-container fake-truncate js-profile-affiliations"><div><a class="u-tcGrayDarker" href="https://gadz.academia.edu/">Ecole Nationale Supérieure arts et metiers</a>, <a class="u-tcGrayDarker" href="https://gadz.academia.edu/Departments/Paris/Documents">Paris</a>, <span class="u-tcGrayDarker">Faculty Member</span></div></div></div></div><div class="sidebar-cta-container"><button class="ds2-5-button hidden profile-cta-button grow js-profile-follow-button" data-broccoli-component="user-info.follow-button" data-click-track="profile-user-info-follow-button" data-follow-user-fname="Fakhreddine" data-follow-user-id="263837202" data-follow-user-source="profile_button" data-has-google="false"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">add</span>Follow</button><button class="ds2-5-button hidden profile-cta-button grow js-profile-unfollow-button" data-broccoli-component="user-info.unfollow-button" data-click-track="profile-user-info-unfollow-button" data-unfollow-user-id="263837202"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">done</span>Following</button></div></div><div class="user-stats-container"><a><div class="stat-container js-profile-followers"><p class="label">Followers</p><p class="data">5</p></div></a><a><div class="stat-container js-profile-followees" data-broccoli-component="user-info.followees-count" data-click-track="profile-expand-user-info-following"><p class="label">Following</p><p class="data">2</p></div></a><a><div class="stat-container js-profile-coauthors" data-broccoli-component="user-info.coauthors-count" data-click-track="profile-expand-user-info-coauthors"><p class="label">Co-author</p><p class="data">1</p></div></a><a href="/FakhreddineAbabsa/mentions"><div class="stat-container"><p 
class="label">Mentions</p><p class="data">1</p></div></a><span><div class="stat-container"><p class="label"><span class="js-profile-total-view-text">Public Views</span></p><p class="data"><span class="js-profile-view-count"></span></p></div></span></div><div class="ri-section"><div class="ri-section-header"><span>Interests</span></div><div class="ri-tags-container"><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="263837202" href="https://www.academia.edu/Documents/in/Eye_tracking"><div id="js-react-on-rails-context" style="display:none" data-rails-context="{&quot;inMailer&quot;:false,&quot;i18nLocale&quot;:&quot;en&quot;,&quot;i18nDefaultLocale&quot;:&quot;en&quot;,&quot;href&quot;:&quot;https://gadz.academia.edu/FakhreddineAbabsa&quot;,&quot;location&quot;:&quot;/FakhreddineAbabsa&quot;,&quot;scheme&quot;:&quot;https&quot;,&quot;host&quot;:&quot;gadz.academia.edu&quot;,&quot;port&quot;:null,&quot;pathname&quot;:&quot;/FakhreddineAbabsa&quot;,&quot;search&quot;:null,&quot;httpAcceptLanguage&quot;:null,&quot;serverSide&quot;:false}"></div> <div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Eye tracking&quot;]}" data-trace="false" data-dom-id="Pill-react-component-e4575068-b8bb-41c2-ba05-8325bd720c28"></div> <div id="Pill-react-component-e4575068-b8bb-41c2-ba05-8325bd720c28"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="263837202" href="https://www.academia.edu/Documents/in/Computer_Science"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Computer Science&quot;]}" data-trace="false" data-dom-id="Pill-react-component-3440612d-a14d-48f1-91cb-726426a8ec43"></div> <div id="Pill-react-component-3440612d-a14d-48f1-91cb-726426a8ec43"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="263837202" href="https://www.academia.edu/Documents/in/Paris"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{&quot;color&quot;:&quot;gray&quot;,&quot;children&quot;:[&quot;Paris&quot;]}" data-trace="false" data-dom-id="Pill-react-component-01b7b46f-31c0-4711-9d99-6d95abbeb1a2"></div> <div id="Pill-react-component-01b7b46f-31c0-4711-9d99-6d95abbeb1a2"></div> </a></div></div></div></div><div class="right-panel-container"><div class="user-content-wrapper"><div class="uploads-container" id="social-redesign-work-container"><div class="upload-header"><h2 class="ds2-5-heading-sans-serif-xs">Uploads</h2></div><div class="documents-container backbone-social-profile-documents" style="width: 100%;"><div class="u-taCenter"></div><div class="profile--tab_content_container js-tab-pane tab-pane active" id="all"><div class="profile--tab_heading_container js-section-heading" data-section="Papers" id="Papers"><h3 class="profile--tab_heading_container">Papers by Fakhreddine Ababsa</h3></div><div class="js-work-strip profile--work_container" data-work-id="111609821"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609821/Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set"><img alt="Research paper thumbnail of Challenging 3D Head Tracking and Evaluation Using 
Unconstrained Test Data Set" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609821/Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set">Challenging 3D Head Tracking and Evaluation Using Unconstrained Test Data Set</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">3D face tracking using one monocular camera is an important topic, since it is useful in many dom...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">3D face tracking using one monocular camera is an important topic, since it is useful in many domains such as: video surveillance system, human machine interaction, biometrics, etc. In this paper, we propose a new 3D face tracking which is robust to large head rotations. Underlying cascaded regression approach for 2D landmark detection, we build an extension in context of 3D pose tracking. To better work with out-of-plane issues, we extend the training dataset by including a new set of synthetic images. For evaluation, we propose to use a new recording system to capture automatically face pose ground-truth, and create a new test dataset, named U3PT (Unconstrained 3D Pose Tracking). Theperformance of our method along with the state-of-the-art methods are carried out to analyze advantage as well as limitations need to be improved in the future.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609821"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609821"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609821; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609821]").text(description); $(".js-view-count[data-work-id=111609821]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609821; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609821']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609821, container: "", }); 
});</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=111609821]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609821,"title":"Challenging 3D Head Tracking and Evaluation Using Unconstrained Test Data Set","translated_title":"","metadata":{"abstract":"3D face tracking using one monocular camera is an important topic, since it is useful in many domains such as: video surveillance system, human machine interaction, biometrics, etc. In this paper, we propose a new 3D face tracking which is robust to large head rotations. Underlying cascaded regression approach for 2D landmark detection, we build an extension in context of 3D pose tracking. To better work with out-of-plane issues, we extend the training dataset by including a new set of synthetic images. For evaluation, we propose to use a new recording system to capture automatically face pose ground-truth, and create a new test dataset, named U3PT (Unconstrained 3D Pose Tracking). Theperformance of our method along with the state-of-the-art methods are carried out to analyze advantage as well as limitations need to be improved in the future.","publication_date":{"day":1,"month":7,"year":2017,"errors":{}}},"translated_abstract":"3D face tracking using one monocular camera is an important topic, since it is useful in many domains such as: video surveillance system, human machine interaction, biometrics, etc. In this paper, we propose a new 3D face tracking which is robust to large head rotations. Underlying cascaded regression approach for 2D landmark detection, we build an extension in context of 3D pose tracking. To better work with out-of-plane issues, we extend the training dataset by including a new set of synthetic images. For evaluation, we propose to use a new recording system to capture automatically face pose ground-truth, and create a new test dataset, named U3PT (Unconstrained 3D Pose Tracking). 
Theperformance of our method along with the state-of-the-art methods are carried out to analyze advantage as well as limitations need to be improved in the future.","internal_url":"https://www.academia.edu/111609821/Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set","translated_internal_url":"","created_at":"2023-12-16T23:55:59.827-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[],"slug":"Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set","translated_slug":"","page_count":null,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision"},{"id":9173,"name":"Biometrics","url":"https://www.academia.edu/Documents/in/Biometrics"},{"id":822358,"name":"Ground Truth","url":"https://www.academia.edu/Documents/in/Ground_Truth"}],"urls":[{"id":37229940,"url":"https://doi.org/10.1109/iv.2017.40"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609820"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609820/Shape_related_constraints_aware_generation_of_Mechanical_Designs_through_Deep_Convolutional_GAN"><img alt="Research paper thumbnail of Shape related constraints aware generation of Mechanical Designs through Deep Convolutional GAN" class="work-thumbnail" src="https://attachments.academia-assets.com/109098709/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609820/Shape_related_constraints_aware_generation_of_Mechanical_Designs_through_Deep_Convolutional_GAN">Shape related constraints aware generation of Mechanical Designs through Deep Convolutional GAN</a></div><div class="wp-workCard_item"><span>arXiv (Cornell University)</span><span>, Oct 22, 2020</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="f71406dfe6546091ea56a2310a847033" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098709,&quot;asset_id&quot;:111609820,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098709/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action 
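
The abstract describes recovering a 3D head pose from monocular 2D facial landmarks. As a minimal illustration of that general pipeline (not the authors' cascaded-regression tracker), the sketch below estimates a pose from a handful of detected landmarks with a generic rigid head model and OpenCV's solvePnP; the 3D model coordinates, camera intrinsics, and 2D points are hypothetical placeholders.

```python
# Minimal sketch: monocular head-pose estimation from 2D facial landmarks.
# NOT the paper's cascaded-regression tracker -- just the standard PnP step that
# maps detected 2D landmarks to a 3D rotation/translation of a rigid head model.
import numpy as np
import cv2

# Hypothetical 3D model points (mm) for six landmarks of a generic rigid head:
# nose tip, chin, left/right eye outer corners, left/right mouth corners.
MODEL_POINTS_3D = np.array([
    [0.0,    0.0,    0.0],    # nose tip
    [0.0,  -63.6,  -12.5],    # chin
    [-43.3,  32.7,  -26.0],   # left eye outer corner
    [43.3,   32.7,  -26.0],   # right eye outer corner
    [-28.9, -28.9,  -24.1],   # left mouth corner
    [28.9,  -28.9,  -24.1],   # right mouth corner
], dtype=np.float64)

def estimate_head_pose(landmarks_2d, frame_size):
    """landmarks_2d: (6, 2) pixel coordinates in the same order as MODEL_POINTS_3D."""
    h, w = frame_size
    focal = w  # rough pinhole approximation: focal length ~ image width
    camera_matrix = np.array([[focal, 0, w / 2],
                              [0, focal, h / 2],
                              [0, 0, 1]], dtype=np.float64)
    dist_coeffs = np.zeros((4, 1))  # assume no lens distortion
    ok, rvec, tvec = cv2.solvePnP(MODEL_POINTS_3D, landmarks_2d.astype(np.float64),
                                  camera_matrix, dist_coeffs,
                                  flags=cv2.SOLVEPNP_ITERATIVE)
    if not ok:
        return None
    rotation_matrix, _ = cv2.Rodrigues(rvec)  # 3x3 head rotation
    return rotation_matrix, tvec

# Usage with made-up landmark positions for a 640x480 frame:
fake_landmarks = np.array([[320, 240], [318, 330], [250, 200],
                           [390, 200], [275, 290], [365, 290]], dtype=np.float64)
pose = estimate_head_pose(fake_landmarks, (480, 640))
```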

Shape related constraints aware generation of Mechanical Designs through Deep Convolutional GAN
arXiv (Cornell University), Oct 22, 2020 · http://arxiv.org/pdf/2010.11833
Abstract: Mechanical product engineering often must comply with manufacturing or geometric constraints related to the shaping process. Mechanical design should therefore rely on robust and fast tools to explore complex shapes, typically for design for additive manufacturing (DfAM). Topology optimization is such a powerful tool, yet integrating geometric (shape-related) constraints into it is hard. In this work, we leverage the ability of machine learning to handle complex geometric and spatial correlations in order to integrate geometry-related constraints into the mechanical design process at the conceptual level. More precisely, we explore the generative capabilities of recent deep learning architectures to enhance mechanical designs, typically for additive manufacturing. We build a generative deep-learning-based approach to topology optimization that integrates mechanical conditions in addition to one typical manufacturing condition (the complexity of a design, i.e. a geometrical condition). The approach is a dual-discriminator GAN: a generator takes the mechanical and geometrical conditions as input and outputs a 2D structure, while two discriminators respectively ensure that the generated structure follows the mechanical constraints and assess the geometrical constraint. We also explore the generation of designs with a non-uniform material distribution and show promising results.
Topics: Computer Science, Topology Optimization, arXiv

A Simple Human Activity Recognition Technique Using DCT
Advanced Concepts for Intelligent Vision Systems, 2016 · http://link.springer.com/content/pdf/10.1007/978-3-319-48680-2_4
Abstract: In this paper, we present a simple new human activity recognition method using the discrete cosine transform (DCT). The scheme uses DCT coefficients extracted from silhouettes as descriptors (features) and performs frame-by-frame recognition, which makes it simple and suitable for real-time applications. We carried out several tests using a radial basis function (RBF) neural network for classification; a comparative study against state-of-the-art methods shows that our technique is faster, simpler, and achieves higher accuracy than other discrete-transform-based techniques and methods proposed in the literature.
Topics: Computer Science, Discrete Cosine Transform
href="https://www.academia.edu/111609817/Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case">Evaluating Added Value of Augmented Reality to Assist Aeronautical Maintenance Workers—Experimentation on On-field Use Case</a></div><div class="wp-workCard_item"><span>Virtual Reality and Augmented Reality</span><span>, 2019</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="8ba014367da6135276fead4ce09a5cc0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098772,&quot;asset_id&quot;:111609817,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098772/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609817"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609817"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609817; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609817]").text(description); $(".js-view-count[data-work-id=111609817]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609817; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609817']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609817, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "8ba014367da6135276fead4ce09a5cc0" } } $('.js-work-strip[data-work-id=111609817]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609817,"title":"Evaluating Added Value of Augmented Reality to Assist Aeronautical Maintenance Workers—Experimentation on On-field Use 
Case","translated_title":"","metadata":{"publisher":"Springer International Publishing","grobid_abstract":"Augmented Reality (AR) technology facilitates interactions with information and understanding of complex situations. Aeronautical Maintenance combines complexity induced by the variety of products and constraints associated to aeronautic sector and the environment of maintenance. AR tools seem well indicated to solve constraints of productivity and quality on the aeronautical maintenance activities by simplifying data interactions for the workers. However, few evaluations of AR have been done in real processes due to the difficulty of integrating the technology without proper tools for deployment and assessing the results. This paper proposes a method to select suitable criteria for AR evaluation in industrial environment and to deploy AR solutions suited to assist maintenance workers. These are used to set up on-field experiments that demonstrate benefits of AR on process and user point of view for different profiles of workers. Further work will consist on using these elements to extend results to AR evaluation on the whole aeronautical maintenance process. A classification of maintenance activities linked to workers specific needs will lead to prediction of the value that augmented reality would bring to each activity.","publication_date":{"day":null,"month":null,"year":2019,"errors":{}},"publication_name":"Virtual Reality and Augmented Reality","grobid_abstract_attachment_id":109098772},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609817/Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case","translated_internal_url":"","created_at":"2023-12-16T23:55:59.149-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098772,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098772/thumbnails/1.jpg","file_name":"LISPEN_EUROVR_2019_LOIZEAU.pdf","download_url":"https://www.academia.edu/attachments/109098772/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Evaluating_Added_Value_of_Augmented_Real.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098772/LISPEN_EUROVR_2019_LOIZEAU-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DEvaluating_Added_Value_of_Augmented_Real.pdf\u0026Expires=1733201480\u0026Signature=gakZpyn5rTY4nRAog5fDUMKClcaeZtdEO1JdU80p4w3YdTklTBWZdpDmLj1nFMlHHZjoGtN5ZFG-UpXlu47wHpMxvMjLQs9f1qXAanUDjE~acwvQA5jOfPEXJuWhPX0sF95oSAzAWdBrDIn4lnRVKLWtUDoOJW9a2DMV5HMOgRKQW-~hDnmRmYs4f5EV3yXYw5Ws34WjeChOyJr7AEpB2-zkk5TgTxGaqXWaLXBJkwbqkaS3I1~-mmsiV1R7DujImxOAmzhBomTYCrcS3-fAWI8Of9HjQAraefiiJhQLxJcrF~oCBnvSLL0p72NowjXF17xsLTaZ3oVXZVD6TndNvg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case","translated_slug":"","page_count":20,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098772,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098772/thumbnails/1.jpg","file_name":"LISPEN_EUROVR_2019_LOIZEAU.pdf","download_url":"https://www.academia.edu/attachments/109098772/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Evaluating_Added_Value_of_Augmented_Real.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098772/LISPEN_EUROVR_2019_LOIZEAU-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DEvaluating_Added_Value_of_Augmented_Real.pdf\u0026Expires=1733201480\u0026Signature=gakZpyn5rTY4nRAog5fDUMKClcaeZtdEO1JdU80p4w3YdTklTBWZdpDmLj1nFMlHHZjoGtN5ZFG-UpXlu47wHpMxvMjLQs9f1qXAanUDjE~acwvQA5jOfPEXJuWhPX0sF95oSAzAWdBrDIn4lnRVKLWtUDoOJW9a2DMV5HMOgRKQW-~hDnmRmYs4f5EV3yXYw5Ws34WjeChOyJr7AEpB2-zkk5TgTxGaqXWaLXBJkwbqkaS3I1~-mmsiV1R7DujImxOAmzhBomTYCrcS3-fAWI8Of9HjQAraefiiJhQLxJcrF~oCBnvSLL0p72NowjXF17xsLTaZ3oVXZVD6TndNvg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":345840,"name":"Criteria","url":"https://www.academia.edu/Documents/in/Criteria"},{"id":453754,"name":"Augmented and Virtual Reality for Civil and Building Engineering","url":"https://www.academia.edu/Documents/in/Augmented_and_Virtual_Reality_for_Civil_and_Building_Engineering"},{"id":783265,"name":"Added Value","url":"https://www.academia.edu/Documents/in/Added_Value"},{"id":1268642,"name":"Software Deployment","url":"https://www.academia.edu/Documents/in/Software_Deployment"}],"urls":[{"id":37229935,"url":"http://link.springer.com/content/pdf/10.1007/978-3-030-31908-3_10"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609816"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609816/Pedestrian_Using_Catadioptric_Sensor_12"><img alt="Research paper thumbnail of Pedestrian Using Catadioptric Sensor 12" class="work-thumbnail" src="https://attachments.academia-assets.com/109098687/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609816/Pedestrian_Using_Catadioptric_Sensor_12">Pedestrian Using Catadioptric Sensor 12</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We investigate the detection of person in the omnidirectional images, adopting a linear SVM. We h...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We investigate the detection of person in the omnidirectional images, adopting a linear SVM. We have implemented HOG-based descriptors, for omnidirectional and spherical images. 
Research paper: Pedestrian Using Catadioptric Sensor 12
Published in 2018. 10 pages.
Abstract: We investigate the detection of persons in omnidirectional images using a linear SVM. We have implemented HOG-based descriptors for omnidirectional and spherical images. In this paper we study the influence of each parameter of our algorithm on the performance of person detection in catadioptric images. Few studies have addressed the problem of human detection with this type of camera, so we have built our own test dataset. Our results show that our detector can robustly detect people in omnidirectional images, provided that the algorithm is adapted to the distortions introduced by the omnidirectional camera.
Full text: http://www.jatit.org/volumes/Vol96No8/2Vol96No8.pdf
data-download="{&quot;attachment_id&quot;:109098692,&quot;asset_id&quot;:111609814,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098692/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609814"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609814"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609814; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609814]").text(description); $(".js-view-count[data-work-id=111609814]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609814; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609814']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609814, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "b8321624b867f8c22fc9e24801590d6f" } } $('.js-work-strip[data-work-id=111609814]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609814,"title":"Augmented Reality assistance for R\u0026D assembly in 
Aeronautics","translated_title":"","metadata":{"publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609814/Augmented_Reality_assistance_for_R_and_D_assembly_in_Aeronautics","translated_internal_url":"","created_at":"2023-12-16T23:55:58.744-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098692,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098692/thumbnails/1.jpg","file_name":"LISPEN_EUROVR82018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098692/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Augmented_Reality_assistance_for_R_and_D.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098692/LISPEN_EUROVR82018_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAugmented_Reality_assistance_for_R_and_D.pdf\u0026Expires=1733201480\u0026Signature=Uzyc1RG4turQjc88aKxgcY2BTOmhLV-zEh5msuyu64-mFHZ~LPeiVs06xZxSWrOudwOJM6AvxG3hKMfH2TS0Uqwig~Lj981NGv5Cde5vPpue3QtLFSol0GlFPtzbWf62UZrwuoKvTkKx~cCec3ORh9H9KcAJ5CFFf7SdmTGGN7e2b2543e9nf-mW6MobJAu20JgiD3QcdLwyoOuG3S3m8M6HOvmSeqDL6CQI5zmjLXil2qmMk-dAFlKfO~7~P988dBsEO2VNtHUni6Eh9Ot74hUR-0RDxjRK2CXv2BGL3jNtJCG67FSApaam06nM~JLewBgMiECSXmtDP99FWWmhXA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Augmented_Reality_assistance_for_R_and_D_assembly_in_Aeronautics","translated_slug":"","page_count":3,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098692,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098692/thumbnails/1.jpg","file_name":"LISPEN_EUROVR82018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098692/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Augmented_Reality_assistance_for_R_and_D.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098692/LISPEN_EUROVR82018_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAugmented_Reality_assistance_for_R_and_D.pdf\u0026Expires=1733201480\u0026Signature=Uzyc1RG4turQjc88aKxgcY2BTOmhLV-zEh5msuyu64-mFHZ~LPeiVs06xZxSWrOudwOJM6AvxG3hKMfH2TS0Uqwig~Lj981NGv5Cde5vPpue3QtLFSol0GlFPtzbWf62UZrwuoKvTkKx~cCec3ORh9H9KcAJ5CFFf7SdmTGGN7e2b2543e9nf-mW6MobJAu20JgiD3QcdLwyoOuG3S3m8M6HOvmSeqDL6CQI5zmjLXil2qmMk-dAFlKfO~7~P988dBsEO2VNtHUni6Eh9Ot74hUR-0RDxjRK2CXv2BGL3jNtJCG67FSApaam06nM~JLewBgMiECSXmtDP99FWWmhXA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098699,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098699/thumbnails/1.jpg","file_name":"LISPEN_EUROVR82018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098699/download_file","bulk_download_file_name":"Augmented_Reality_assistance_for_R_and_D.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098699/LISPEN_EUROVR82018_ABABSA-libre.pdf?1702800347=\u0026response-content-disposition=attachment%3B+filename%3DAugmented_Reality_assistance_for_R_and_D.pdf\u0026Expires=1733201480\u0026Signature=OOvsRcIkVPclLA3DALDb3uzrIVxEyUHgVeznXfk75DzLc-466VvwrZoEf7Om0hiIIc~2Fj6HGzI5TnnF5nz3~FOvdAD7VvNvCLPgp-WkiCKZSJ7gubIbv5HMrAq~k1m23faVauyNHEoldXFXv6TPSQn3yiNIIIl4xmtpqwSvAayOJAJAHV6aVBAgvSO7BsptSLb1l-SpGrUif~jnKWi91iHZ-~dO0I5OoLFNGGzuLpbHQ~045VNpXHcpo7kCoW-fGg8hAx4aoG3H3NphfuBDRLcwTE8gzTaYpZxM4eqQxWcAfdnDFeIEU2DovZZYoXHvPR0r7yTyl8WZrmcK5JC94g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering"},{"id":88,"name":"Aerospace Engineering","url":"https://www.academia.edu/Documents/in/Aerospace_Engineering"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":89988,"name":"Assembly","url":"https://www.academia.edu/Documents/in/Assembly"},{"id":181995,"name":"Aeronautics","url":"https://www.academia.edu/Documents/in/Aeronautics"},{"id":1133084,"name":"Factory of the Future","url":"https://www.academia.edu/Documents/in/Factory_of_the_Future"}],"urls":[{"id":37229932,"url":"https://sam.ensam.eu/bitstream/handle/10985/14160/LISPEN_EUROVR82018_ABABSA.pdf?isAllowed=y\u0026sequence=1"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609812"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609812/Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul"><img alt="Research paper thumbnail of Usability of Augmented Reality in Aeronautic Maintenance, Repair and Overhaul" class="work-thumbnail" 
src="https://attachments.academia-assets.com/109098694/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609812/Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul">Usability of Augmented Reality in Aeronautic Maintenance, Repair and Overhaul</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Augmented Reality (AR) is a strong growing research topic in several areas including industry, tr...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Augmented Reality (AR) is a strong growing research topic in several areas including industry, training, art and entertainment. AR can help users to achieve very complex tasks by enhancing their vision with useful and well-adapted information. This paper deals with evaluating the usability of AR in aeronautic maintenance training tasks. A case study in the on-site maintenance department was conducted using an augmented reality application, involving operators at several levels of expertise. Obtained results highlighted the full efficacy of AR in the field of aeronautic maintenance. CCS Concepts • Human-centered computing → Mixed / augmented reality;</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="8347157cb8c47b51c9486658ecb28495" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098694,&quot;asset_id&quot;:111609812,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098694/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609812"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609812"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609812; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609812]").text(description); $(".js-view-count[data-work-id=111609812]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609812; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609812']"); 
container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609812, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "8347157cb8c47b51c9486658ecb28495" } } $('.js-work-strip[data-work-id=111609812]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609812,"title":"Usability of Augmented Reality in Aeronautic Maintenance, Repair and Overhaul","translated_title":"","metadata":{"abstract":"Augmented Reality (AR) is a strong growing research topic in several areas including industry, training, art and entertainment. AR can help users to achieve very complex tasks by enhancing their vision with useful and well-adapted information. This paper deals with evaluating the usability of AR in aeronautic maintenance training tasks. A case study in the on-site maintenance department was conducted using an augmented reality application, involving operators at several levels of expertise. Obtained results highlighted the full efficacy of AR in the field of aeronautic maintenance. CCS Concepts • Human-centered computing → Mixed / augmented reality;","publisher":"ICAT-EGVE","publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":"Augmented Reality (AR) is a strong growing research topic in several areas including industry, training, art and entertainment. AR can help users to achieve very complex tasks by enhancing their vision with useful and well-adapted information. This paper deals with evaluating the usability of AR in aeronautic maintenance training tasks. A case study in the on-site maintenance department was conducted using an augmented reality application, involving operators at several levels of expertise. Obtained results highlighted the full efficacy of AR in the field of aeronautic maintenance. 
CCS Concepts • Human-centered computing → Mixed / augmented reality;","internal_url":"https://www.academia.edu/111609812/Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul","translated_internal_url":"","created_at":"2023-12-16T23:55:58.522-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098694,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098694/thumbnails/1.jpg","file_name":"LISPEN_ICAT-EGVE_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098694/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Usability_of_Augmented_Reality_in_Aerona.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098694/LISPEN_ICAT-EGVE_2018_ABABSA-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DUsability_of_Augmented_Reality_in_Aerona.pdf\u0026Expires=1733201480\u0026Signature=Su7b4wWCWdt60tgbyqfw716wNX7AVPNmw5WQT~GgheBN4J0YITCvGGFENHAyjNqMIOPyAfeiwuTmMSb~ViVUiWTxBWyQ6Hj10GXG1bVVmmoW2IqDBhmUGLZMmcIW51by2NEEN~3Pjjj4Ic4Qu~lPSN3k~Kd2h2aVsqSoYPP~vN2mYfCVnldZOBqODsBReT1jB5d7kYhcflm~I4-cDMkwg6ol52i9qZhTpaalBQzAafWp-nUBe2H8Z6yZXJZfu66nV2UIsDSJGIYAX0Em76-Oa46SYjX-rzP9Hri1Rq0Uh4NVLJC4QlbPnqiNAeNcLJCEX5QQWyuMUiC3XUXgIpf4Zw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul","translated_slug":"","page_count":5,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098694,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098694/thumbnails/1.jpg","file_name":"LISPEN_ICAT-EGVE_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098694/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Usability_of_Augmented_Reality_in_Aerona.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098694/LISPEN_ICAT-EGVE_2018_ABABSA-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DUsability_of_Augmented_Reality_in_Aerona.pdf\u0026Expires=1733201480\u0026Signature=Su7b4wWCWdt60tgbyqfw716wNX7AVPNmw5WQT~GgheBN4J0YITCvGGFENHAyjNqMIOPyAfeiwuTmMSb~ViVUiWTxBWyQ6Hj10GXG1bVVmmoW2IqDBhmUGLZMmcIW51by2NEEN~3Pjjj4Ic4Qu~lPSN3k~Kd2h2aVsqSoYPP~vN2mYfCVnldZOBqODsBReT1jB5d7kYhcflm~I4-cDMkwg6ol52i9qZhTpaalBQzAafWp-nUBe2H8Z6yZXJZfu66nV2UIsDSJGIYAX0Em76-Oa46SYjX-rzP9Hri1Rq0Uh4NVLJC4QlbPnqiNAeNcLJCEX5QQWyuMUiC3XUXgIpf4Zw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098695,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098695/thumbnails/1.jpg","file_name":"LISPEN_ICAT-EGVE_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098695/download_file","bulk_download_file_name":"Usability_of_Augmented_Reality_in_Aerona.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098695/LISPEN_ICAT-EGVE_2018_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DUsability_of_Augmented_Reality_in_Aerona.pdf\u0026Expires=1733201480\u0026Signature=U4oyTgGiTPxseTeWQNUkuAYCDQc4RNPEAHdHGVsSu94fDoE1fb68-C6YzdkKLTd3SJNt48WiPZAE1NWcxzsnwF6V4FZh3vjroBgSPkP1kXCF6gI2Io4ZARiyLYyufbmfBxc9e0xOrn0wRjE2OEzIZLWMloF7RbfwaImaMRo2r3hE9bpckJXDHeyzXxljR8faM8faQkoIZr2zfddvEqkyND6uXQMOcTOVqrBj3i8xQJzay-66hwl54NsaD-FltxrJ1GCIai-AbWirj-2XlZWK82CrwkzLDHAsfewMVIZC0GU4ppDZWnVdpt914jYtB~Za56T1GhEFtYpJLDHuI5Cdsg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":472,"name":"Human Computer Interaction","url":"https://www.academia.edu/Documents/in/Human_Computer_Interaction"},{"id":1615,"name":"Usability","url":"https://www.academia.edu/Documents/in/Usability"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":87372,"name":"Maintenance","url":"https://www.academia.edu/Documents/in/Maintenance"}],"urls":[{"id":37229931,"url":"https://sam.ensam.eu/bitstream/handle/10985/14360/LISPEN_ICAT-EGVE_2018_ABABSA.pdf?isAllowed=y\u0026sequence=4"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609811"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609811/Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion"><img alt="Research paper thumbnail of Free Hand-Based 3D Interaction in Optical See-Through Augmented Reality Using Leap Motion" class="work-thumbnail" 
src="https://attachments.academia-assets.com/109098696/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609811/Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion">Free Hand-Based 3D Interaction in Optical See-Through Augmented Reality Using Leap Motion</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In augmented reality environments, the natural hand interaction between a virtual object and the ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In augmented reality environments, the natural hand interaction between a virtual object and the user is a major issue to manipulate a rendered object in a convenient way. Microsoft’s HoloLens (Microsoft 2018) is an innovative augmented reality (AR) device that has provided an impressive experience for the user. However, the gesture interactions offered to the user are very limited. HoloLens currently recognizes two core component gestures: Air tap and Bloom. To solve this issue, we propose to integrate a Leap Motion Controller (LMC) within the HoloLens device (Figure 1). We thus used 3D hand and finger tracking provided by the LMC (Lu &amp; al., 2016) to propose new free hand-based interaction more natural and intuitive. We implemented three fully 3D techniques for selection, translation and rotation manipulation. 
In this work, we first investigated how to combine the two devices to get them working together in real time, and then we evaluated the proposed 3D hand interactions.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="de7247812016efae9b19291984428968" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098696,&quot;asset_id&quot;:111609811,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098696/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609811"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609811"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609811; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609811]").text(description); $(".js-view-count[data-work-id=111609811]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609811; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609811']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609811, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "de7247812016efae9b19291984428968" } } $('.js-work-strip[data-work-id=111609811]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609811,"title":"Free Hand-Based 3D Interaction in Optical See-Through Augmented Reality Using Leap Motion","translated_title":"","metadata":{"abstract":"In augmented reality environments, the natural hand interaction between a virtual object and the user is a major issue to manipulate a rendered object in a convenient way. 
Microsoft’s HoloLens (Microsoft 2018) is an innovative augmented reality (AR) device that has provided an impressive experience for the user. However, the gesture interactions offered to the user are very limited. HoloLens currently recognizes two core component gestures: Air tap and Bloom. To solve this issue, we propose to integrate a Leap Motion Controller (LMC) within the HoloLens device (Figure 1). We thus used 3D hand and finger tracking provided by the LMC (Lu \u0026 al., 2016) to propose new free hand-based interaction more natural and intuitive. We implemented three fully 3D techniques for selection, translation and rotation manipulation. In this work, we first investigated how to combine the two devices to get them working together in real time, and then we evaluated the proposed 3D hand interactions.","publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":"In augmented reality environments, the natural hand interaction between a virtual object and the user is a major issue to manipulate a rendered object in a convenient way. Microsoft’s HoloLens (Microsoft 2018) is an innovative augmented reality (AR) device that has provided an impressive experience for the user. However, the gesture interactions offered to the user are very limited. HoloLens currently recognizes two core component gestures: Air tap and Bloom. To solve this issue, we propose to integrate a Leap Motion Controller (LMC) within the HoloLens device (Figure 1). We thus used 3D hand and finger tracking provided by the LMC (Lu \u0026 al., 2016) to propose new free hand-based interaction more natural and intuitive. We implemented three fully 3D techniques for selection, translation and rotation manipulation. In this work, we first investigated how to combine the two devices to get them working together in real time, and then we evaluated the proposed 3D hand 
interactions.","internal_url":"https://www.academia.edu/111609811/Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion","translated_internal_url":"","created_at":"2023-12-16T23:55:57.410-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098696,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098696/thumbnails/1.jpg","file_name":"LISPEN_EuroVR_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098696/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Free_Hand_Based_3D_Interaction_in_Optica.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098696/LISPEN_EuroVR_2018_ABABSA-libre.pdf?1702800349=\u0026response-content-disposition=attachment%3B+filename%3DFree_Hand_Based_3D_Interaction_in_Optica.pdf\u0026Expires=1733201480\u0026Signature=LPrk--QNFFIaDXJ~evWsFlGW94mvXT51WDmaQCgPD49ovrEMX3hSxyww6eLVzAYcpdJFunBJnlNM~Fxo9oGZwielyjRrt2vg6Eze2M2hAVQzWSNu15aIV3Vo3H5E0~5YjrNI4jit4~haBQ~xcrKuwezVRmahWpWIZF4If4GlFCHAwWBaZLqoOPGrFEUDT9-9U07t-ysC-biT5t~Hw--RzGnRHUaxD3qoO6f-R8QcW9wfmSOQAFCJFwz0KqmtwhAcY2k82TozGFv1yuJH4xDV2JtCbcdq3-~z6kyaggda60BcsAYyGZP1MyubSmNuZLnsoz1GE7dpSW4Pt24qOvYmZg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion","translated_slug":"","page_count":3,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098696,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098696/thumbnails/1.jpg","file_name":"LISPEN_EuroVR_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098696/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Free_Hand_Based_3D_Interaction_in_Optica.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098696/LISPEN_EuroVR_2018_ABABSA-libre.pdf?1702800349=\u0026response-content-disposition=attachment%3B+filename%3DFree_Hand_Based_3D_Interaction_in_Optica.pdf\u0026Expires=1733201480\u0026Signature=LPrk--QNFFIaDXJ~evWsFlGW94mvXT51WDmaQCgPD49ovrEMX3hSxyww6eLVzAYcpdJFunBJnlNM~Fxo9oGZwielyjRrt2vg6Eze2M2hAVQzWSNu15aIV3Vo3H5E0~5YjrNI4jit4~haBQ~xcrKuwezVRmahWpWIZF4If4GlFCHAwWBaZLqoOPGrFEUDT9-9U07t-ysC-biT5t~Hw--RzGnRHUaxD3qoO6f-R8QcW9wfmSOQAFCJFwz0KqmtwhAcY2k82TozGFv1yuJH4xDV2JtCbcdq3-~z6kyaggda60BcsAYyGZP1MyubSmNuZLnsoz1GE7dpSW4Pt24qOvYmZg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098700,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098700/thumbnails/1.jpg","file_name":"LISPEN_EuroVR_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098700/download_file","bulk_download_file_name":"Free_Hand_Based_3D_Interaction_in_Optica.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098700/LISPEN_EuroVR_2018_ABABSA-libre.pdf?1702800347=\u0026response-content-disposition=attachment%3B+filename%3DFree_Hand_Based_3D_Interaction_in_Optica.pdf\u0026Expires=1733201480\u0026Signature=UmV~UwXUNN3tKsXgLzAN3haUO0DJ3LBWW7nsk6uuD-reFaZBByxdI7dzkejahlZxGvfd6N-qdzRVmIb1OE-TVtpbBfglaz9r~I5XzHbztJ8Ki3T6HDxN61rraFSVSzKj~k6Xp4v2WT~krCzRtWs60W10vqQ3j~ZJB6TjbA-wxrdB0753WHj-4tk12zZ~kUGcKPrn1Z4aF8K4XYgc-9C8fbzf3EBiATSd7g2vJOdJ7u0YOpwpeu2~pwzcTQvFiY0djejRIxHXyYwZjfKHYxz7fJh-HVRDudV2qRQSDOYRabwgIuzGTehx0pggcF~h678jmmoVME8jBTNTFFBeXpMC4w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":96156,"name":"Natural interaction","url":"https://www.academia.edu/Documents/in/Natural_interaction"},{"id":96893,"name":"Calibration","url":"https://www.academia.edu/Documents/in/Calibration"},{"id":1167359,"name":"Leap Motion","url":"https://www.academia.edu/Documents/in/Leap_Motion"},{"id":2668784,"name":"HoloLens","url":"https://www.academia.edu/Documents/in/HoloLens-1"}],"urls":[{"id":37229930,"url":"https://sam.ensam.eu/bitstream/handle/10985/14475/LISPEN_EuroVR_2018_ABABSA.pdf?isAllowed=y\u0026sequence=1"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609810"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609810/Reconnaissance_3D_des_Gestes_pour_l_Interaction_Naturelle_Homme_Robot"><img alt="Research paper thumbnail of Reconnaissance 3D des Gestes pour l’Interaction Naturelle Homme Robot" class="work-thumbnail" src="https://attachments.academia-assets.com/109098684/thumbnails/1.jpg" /></a></div><div class="wp-workCard 
wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609810/Reconnaissance_3D_des_Gestes_pour_l_Interaction_Naturelle_Homme_Robot">Reconnaissance 3D des Gestes pour l’Interaction Naturelle Homme Robot</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Dans ce papier, nous proposons une methode de reconnaissance 3D des gestes pour l’interaction hom...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Dans ce papier, nous proposons une methode de reconnaissance 3D des gestes pour l’interaction homme robot (HRI) basee sur l’information de profondeur fournie par la Kinect. Le suivi du corps est realise avec l’algorithme Skeleton fourni par le Kinect SDK. L’idee de ce travail est de calculer les angles des articulations de la partie supe- rieure du corps durant l’execution du geste. Les variations de ces angles seront les entrees des Modeles de Markov Caches afin de reconnaitre les gestes dynamiques. Les re- sultats montrent que notre methode est tres robuste ; elle necessite peu de pretraitements et n’est pas influencee par les conditions de l’environnement comme les changements d’eclairage et la complexite de la scene.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="39bd8ec1c810cc4c7b6602f4e0bd7f4a" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098684,&quot;asset_id&quot;:111609810,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098684/download_file?st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609810"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609810"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609810; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609810]").text(description); $(".js-view-count[data-work-id=111609810]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609810; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609810']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); 
Reconnaissance 3D des Gestes pour l'Interaction Naturelle Homme Robot (2015)
Abstract (translated from French): In this paper we propose a 3D gesture recognition method for human–robot interaction (HRI) based on the depth information provided by the Kinect. Body tracking is performed with the Skeleton algorithm supplied by the Kinect SDK. The idea of this work is to compute the joint angles of the upper body while the gesture is being performed; the variations of these angles are then used as inputs to Hidden Markov Models in order to recognize dynamic gestures. The results show that the method is very robust: it requires little preprocessing and is not affected by environmental conditions such as lighting changes or scene complexity.
Topics: Humanities, Art, Hidden Markov Models, gesture tracking
Full text: https://hal.archives-ouvertes.fr/hal-01177441/document
Dynamic Gesture Recognition for Natural Human System Interaction (2016)
Abstract: This paper addresses two problems: 3D dynamic gesture recognition and gesture misallocation. To solve them, we propose a new approach that combines Hidden Markov Models (HMM) and Dynamic Time Warping (DTW). The approach has two main phases: first, gestures are recognized with a hidden Markov model; second, misallocation is avoided by rejecting gestures based on a threshold computed with DTW. Our database includes many samples of five gestures captured with a Kinect and described by depth information only. The results show that our approach yields good gesture classification without any misallocation and is robust against environmental constraints.
Topics: Computer Science, Artificial Intelligence, Human Computer Interaction, Gesture, Gesture Recognition, Kinect, hidden Markov model, Dynamic Time Warping, Archaeology of Natural Places
Full text: http://www.jatit.org/volumes/Vol91No2/17Vol91No2.pdf
Combining HoloLens and Leap-Motion for Free Hand-Based 3D Interaction in MR Environments (Lecture Notes in Computer Science, Springer, 2020)
Abstract: The ability to interact with virtual objects using gestures would allow users to improve their experience in Mixed Reality (MR) environments, especially when they use AR headsets. Today, MR head-mounted displays such as the HoloLens integrate hand-gesture-based interaction, allowing users to take actions in MR environments; however, the proposed interactions remain limited. In this paper, we propose to combine a Leap Motion Controller (LMC) with a HoloLens in order to improve gesture interaction with virtual objects. Two main issues are addressed: an interactive calibration procedure for the coupled HoloLens-LMC device, and an intuitive hand-based interaction approach using LMC data in the HoloLens environment. A first set of experiments was carried out to evaluate the accuracy and the usability of the proposed approach.
Topics: Computer Science, Human Computer Interaction, Computer Vision, Usability, Gesture, Mixed Reality, Natural interaction, Calibration
Full text: https://link.springer.com/content/pdf/10.1007/978-3-030-58465-8_24
3D Human Tracking with Catadioptric Omnidirectional Camera (ACM International Conference on Multimedia Retrieval (ICMR), 2019)
Abstract: This paper deals with the problem of 3D human tracking in catadioptric images using a particle-filtering framework. While traditional perspective images are well exploited, only a few methods have been developed for catadioptric vision, whether for human detection or for tracking. We propose to extend 3D pose estimation from perspective cameras to catadioptric sensors. We develop original likelihood functions based, on the one hand, on the geodesic distance in the spherical space SO(3) and, on the other hand, on the mapping between the human silhouette in the images and the projected 3D model. These likelihood functions, combined with a particle filter whose propagation model is adapted to the spherical space, allow accurate 3D human tracking in omnidirectional images. Both visual and quantitative analyses of the experimental results demonstrate the effectiveness of our approach.
Topics: Computer Science, Artificial Intelligence, Computer Vision, Human Tracking, Particle Filtering, Particle Filter, Egomotion, Silhouette, Omnidirectional Camera, Omnidirectional antenna
Full text: https://dl.acm.org/doi/pdf/10.1145/3323873.3325027
Augmented Reality Application in Manufacturing Industry: Maintenance and Non-destructive Testing (NDT) Use Cases (Lecture Notes in Computer Science, Springer, 2020)
Abstract: In recent years, a structural transformation of the manufacturing industry has been occurring as a result of the digital revolution. Digital tools are now systematically used throughout the entire value chain, from design to production to marketing, especially virtual and augmented reality. The purpose of this paper is therefore to review, through concrete use cases, the progress of these novel technologies and their use in the manufacturing industry.
Topics: Engineering, Computer Science, Manufacturing, Augmented Reality, Nondestructive testing, Industry, Manufacturing Engineering, Human Machine Interaction, Digital Transformation
Full text: https://link.springer.com/content/pdf/10.1007/978-3-030-58468-9_24
Methodology for the Field Evaluation of the Impact of Augmented Reality Tools for Maintenance Workers in the Aeronautic Industry (Frontiers in Virtual Reality, 2021)
Abstract: Augmented Reality (AR) enhances the comprehension of complex situations by making the handling of contextual information easier. Maintenance activities in aeronautics consist of complex tasks carried out on various high-technology products under severe constraints from the sector and the work environment. AR tools appear to be a potential solution for improving the interaction between workers and technical data, and thereby increasing the productivity and the quality of aeronautical maintenance activities. However, assessments of the actual impact of AR on industrial processes are limited, owing to a lack of methods and tools to assist in the integration and evaluation of AR tools in the field. This paper presents a method for deploying AR tools adapted to maintenance workers and for selecting relevant criteria for evaluating their impact in an industrial context. The method is applied to design an AR tool for the maintenance workshop, to experiment on real use cases, and to observe the impact of AR on productivity…
Topics: Computer Science, Augmented Reality, Productivity, Aeronautic, Industry, Maintenance, Criteria, Deploy
Full text: https://www.frontiersin.org/articles/10.3389/frvir.2020.603189/full
3D Human Pose Estimation with a Catadioptric Sensor in Unconstrained Environments Using an Annealed Particle Filter (Sensors, MDPI, 2020)
Abstract: The purpose of this paper is to investigate the problem of 3D human tracking in complex environments using a particle filter with images captured by a catadioptric vision system. This issue has been widely studied in the literature for RGB images acquired with conventional perspective cameras, while omnidirectional images have seldom been used and published work in this field remains limited. In this study, Riemannian manifolds were considered in order to compute the gradient on spherical images and generate a robust descriptor used together with an SVM classifier for human detection. Original likelihood functions associated with the particle filter are proposed, using both geodesic distances and overlapping regions between the silhouette detected in the images and the projected 3D human model. Our approach was experimentally evaluated on real data and showed favorable results compared to machine-learning-based techniques in terms of 3D pose accuracy; thus, the Root Mean Square…
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098705,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098705/thumbnails/1.jpg","file_name":"pdf.pdf","download_url":"https://www.academia.edu/attachments/109098705/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"3D_Human_Pose_Estimation_with_a_Catadiop.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098705/pdf-libre.pdf?1702800360=\u0026response-content-disposition=attachment%3B+filename%3D3D_Human_Pose_Estimation_with_a_Catadiop.pdf\u0026Expires=1733201481\u0026Signature=bxJbG~iiky0MVElmi1Wsw2AREOS6B0054bR3NqrFjKk2amCjc5~Ayp6klYa35am7qLoK9y9jXyRzwHtqzUF8SThM4Vb4jH-vgzrs2XphNDAuIKj77vDFnh853e7SrTEy3zK6myf8Rhod3jDoi9g5NdUGe7g0o4hZsfIcRz3Hh0yebQafl2wGcA6aH-5STu3VQXzApM2dTaJwVIfh-2UpumavPqYDzBzJOd2wPUcvvd4c4Ag5Tmn7Yq~jrEH7iVVHnfjXkwwDssCdMTNM1lWX4Zp061PQPjFs-xBQE5wLobypvj6NdY7mzmXBQHiS6~acK5YpcyeW0QVghP~6QEaoOw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098688,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098688/thumbnails/1.jpg","file_name":"pdf.pdf","download_url":"https://www.academia.edu/attachments/109098688/download_file","bulk_download_file_name":"3D_Human_Pose_Estimation_with_a_Catadiop.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098688/pdf-libre.pdf?1702800366=\u0026response-content-disposition=attachment%3B+filename%3D3D_Human_Pose_Estimation_with_a_Catadiop.pdf\u0026Expires=1733201481\u0026Signature=EI77byXvxmqr1YuNRkLEFVfnROeh2KwHHC8~mM25Kz8otnzGdeNIWf4wbTKKaY0t5QYlGrSyz3aBXKMvyIdiwYh1OWtHtDES0PmgI~K8xKA89UVCMHOjPnZ3~dvLCIm5J6BDN~DAQOgOTy9BU5PXH~BL~XjI1pTOioE2~73LOZluBLT9nsgK9RMWSFRKW-iPCb1fKx4ZSJ8wH5EmTaw3x398~ae1k33z60b5g1m7rlZan2Bnn4B-Tmj0LSb~BupZIaPpj3VoKROpmZ~wemwouivtT7AHw0hjdJnD7o~AeMvgywcEyTnVhfpfuM3n30gMm5DvANLkWQ3XTtjYPNEQzA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":524,"name":"Analytical Chemistry","url":"https://www.academia.edu/Documents/in/Analytical_Chemistry"},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision"},{"id":26327,"name":"Medicine","url":"https://www.academia.edu/Documents/in/Medicine"},{"id":55405,"name":"Sensors","url":"https://www.academia.edu/Documents/in/Sensors"},{"id":100874,"name":"Human Tracking","url":"https://www.academia.edu/Documents/in/Human_Tracking"},{"id":261121,"name":"Particle Filter","url":"https://www.academia.edu/Documents/in/Particle_Filter"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering"},{"id":1648187,"name":"Silhouette","url":"https://www.academia.edu/Documents/in/Silhouette"},{"id":2755698,"name":"Omnidirectional Camera","url":"https://www.academia.edu/Documents/in/Omnidirectional_Camera"},{"id":3209333,"name":"Mean Squared Error","url":"https://www.academia.edu/Documents/in/Mean_Squared_Error"}],"urls":[{"id":37229922,"url":"https://www.mdpi.com/1424-8220/20/23/6985/pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" 
data-work-id="111609802"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609802/An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning"><img alt="Research paper thumbnail of An Efficient Human Activity Recognition Technique Based on Deep Learning" class="work-thumbnail" src="https://attachments.academia-assets.com/109098776/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609802/An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning">An Efficient Human Activity Recognition Technique Based on Deep Learning</a></div><div class="wp-workCard_item"><span>Pattern Recognition and Image Analysis</span><span>, 2019</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="cb81a65636e34f8edb82a26e73ea1a41" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098776,&quot;asset_id&quot;:111609802,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098776/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609802"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609802"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609802; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609802]").text(description); $(".js-view-count[data-work-id=111609802]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609802; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609802']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609802, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], 
function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "cb81a65636e34f8edb82a26e73ea1a41" } } $('.js-work-strip[data-work-id=111609802]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609802,"title":"An Efficient Human Activity Recognition Technique Based on Deep Learning","translated_title":"","metadata":{"publisher":"Pleiades Publishing Ltd","ai_title_tag":"Efficient Deep Learning for Human Activity Recognition","grobid_abstract":"In this paper, we present a new deep learning-based human activity recognition technique. First, we track and extract human body from each frame of the video stream. Next, we abstract human silhouettes and use them to create binary space-time maps (BSTMs) which summarize human activity within a defined time interval. Finally, we use convolutional neural network (CNN) to extract features from BSTMs and classify the activities. To evaluate our approach, we carried out several tests using three public datasets: Weizmann, Keck Gesture and KTH Database. Experimental results show that our technique outperforms conventional state-of-the-art methods in term of recognition accuracy and provides comparable performance against recent deep learning techniques. It's simple to implement, requires less computing power, and can be used for multi-subject activity recognition.","publication_date":{"day":null,"month":null,"year":2019,"errors":{}},"publication_name":"Pattern Recognition and Image Analysis","grobid_abstract_attachment_id":109098776},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609802/An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning","translated_internal_url":"","created_at":"2023-12-16T23:55:55.558-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098776,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098776/thumbnails/1.jpg","file_name":"LISPEN_PRIA_ABABSA_2019.pdf","download_url":"https://www.academia.edu/attachments/109098776/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"An_Efficient_Human_Activity_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098776/LISPEN_PRIA_ABABSA_2019-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAn_Efficient_Human_Activity_Recognition.pdf\u0026Expires=1733201481\u0026Signature=fWxzzyFgkmNzbaTMZa0WG4HPAANcrinG21zekO~o5MDR45JTASjkxPrabp1~IsjnLl5n8ML0dagyKUCjSgAulMGTxPRu2WcKi7bW3DmH-~RmVYU8LVfSs-q6B2QgUGVBgtM9UHFEDBNekwnhAx0mQLrmDSCyPWGBQXi2TvY8CkTckq6txFbix5kbkunUu3o-kFl95i2yPFmcSgQ1KGVu405M4aAYoJx3MLBc8R2D274f0NlJpTnGNVb-qO6685OV6vaae~chDIUI6BAFAfPUJ3iRxXdu~CWcQzDB2hICo1uQRVTFPXSk3uXYfOePJN-lxJ7A4ysboswO9oTmzFq5pw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning","translated_slug":"","page_count":15,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098776,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098776/thumbnails/1.jpg","file_name":"LISPEN_PRIA_ABABSA_2019.pdf","download_url":"https://www.academia.edu/attachments/109098776/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"An_Efficient_Human_Activity_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098776/LISPEN_PRIA_ABABSA_2019-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAn_Efficient_Human_Activity_Recognition.pdf\u0026Expires=1733201481\u0026Signature=fWxzzyFgkmNzbaTMZa0WG4HPAANcrinG21zekO~o5MDR45JTASjkxPrabp1~IsjnLl5n8ML0dagyKUCjSgAulMGTxPRu2WcKi7bW3DmH-~RmVYU8LVfSs-q6B2QgUGVBgtM9UHFEDBNekwnhAx0mQLrmDSCyPWGBQXi2TvY8CkTckq6txFbix5kbkunUu3o-kFl95i2yPFmcSgQ1KGVu405M4aAYoJx3MLBc8R2D274f0NlJpTnGNVb-qO6685OV6vaae~chDIUI6BAFAfPUJ3iRxXdu~CWcQzDB2hICo1uQRVTFPXSk3uXYfOePJN-lxJ7A4ysboswO9oTmzFq5pw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":17701,"name":"Gesture Recognition","url":"https://www.academia.edu/Documents/in/Gesture_Recognition"},{"id":36449,"name":"Activity Recognition","url":"https://www.academia.edu/Documents/in/Activity_Recognition"},{"id":57238,"name":"Human Activity Recognition","url":"https://www.academia.edu/Documents/in/Human_Activity_Recognition"},{"id":61145,"name":"Medical Image Analysis and Pattern Recognition","url":"https://www.academia.edu/Documents/in/Medical_Image_Analysis_and_Pattern_Recognition"},{"id":80414,"name":"Mathematical Sciences","url":"https://www.academia.edu/Documents/in/Mathematical_Sciences"},{"id":81182,"name":"Deep Learning","url":"https://www.academia.edu/Documents/in/Deep_Learning"},{"id":106145,"name":"Classification","url":"https://www.academia.edu/Documents/in/Classification"},{"id":1568111,"name":"Convolutional Neural Network","url":"https://www.academia.edu/Documents/in/Convolutional_Neural_Network"},{"id":1597410,"name":"Features Extraction","url":"https://www.academia.edu/Documents/in/Features_Extraction"},{"id":2058532,"name":"Convolutional Neural Network [CNN]","url":"https://www.academia.edu/Documents/in/Convolutional_Neural_Network_CNN_"}],"urls":[{"id":37229921,"url":"http://link.springer.com/content/pdf/10.1134/S1054661819040084.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609800"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609800/Fusion_of_structural_and_textural_features_for_melanoma_recognition"><img alt="Research paper thumbnail of Fusion of structural and textural features for melanoma recognition" class="work-thumbnail" src="https://attachments.academia-assets.com/109098762/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" 
data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609800/Fusion_of_structural_and_textural_features_for_melanoma_recognition">Fusion of structural and textural features for melanoma recognition</a></div><div class="wp-workCard_item"><span>IET Computer Vision</span><span>, 2018</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Melanoma is one the most increasing cancers since past decades. For accurate detection and classi...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Melanoma is one the most increasing cancers since past decades. For accurate detection and classification, discriminative features are required to distinguish between benign and malignant cases. In this study, the authors introduce a fusion of structural and textural features from two descriptors. The structural features are extracted from wavelet and curvelet transforms, whereas the textural features are extracted from different variants of local binary pattern operator. The proposed method is implemented on 200 images from dermoscopy database including 160 non‐melanoma and 40 melanoma images, where a rigorous statistical analysis for the database is performed. Using support vector machine (SVM) classifier with random sampling cross‐validation method between the three cases of skin lesions given in the database, the validated results showed a very encouraging performance with a sensitivity of 78.93%, a specificity of 93.25% and an accuracy of 86.07%. The proposed approach outperfor...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="bd0812efde7ea58cf3f98e12837292d2" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098762,&quot;asset_id&quot;:111609800,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098762/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609800"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609800"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609800; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609800]").text(description); $(".js-view-count[data-work-id=111609800]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609800; 
window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609800']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609800, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "bd0812efde7ea58cf3f98e12837292d2" } } $('.js-work-strip[data-work-id=111609800]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609800,"title":"Fusion of structural and textural features for melanoma recognition","translated_title":"","metadata":{"abstract":"Melanoma is one the most increasing cancers since past decades. For accurate detection and classification, discriminative features are required to distinguish between benign and malignant cases. In this study, the authors introduce a fusion of structural and textural features from two descriptors. The structural features are extracted from wavelet and curvelet transforms, whereas the textural features are extracted from different variants of local binary pattern operator. The proposed method is implemented on 200 images from dermoscopy database including 160 non‐melanoma and 40 melanoma images, where a rigorous statistical analysis for the database is performed. Using support vector machine (SVM) classifier with random sampling cross‐validation method between the three cases of skin lesions given in the database, the validated results showed a very encouraging performance with a sensitivity of 78.93%, a specificity of 93.25% and an accuracy of 86.07%. The proposed approach outperfor...","publisher":"Institution of Engineering and Technology (IET)","ai_title_tag":"Melanoma Detection via Structural and Textural Feature Fusion","publication_date":{"day":null,"month":null,"year":2018,"errors":{}},"publication_name":"IET Computer Vision"},"translated_abstract":"Melanoma is one the most increasing cancers since past decades. For accurate detection and classification, discriminative features are required to distinguish between benign and malignant cases. In this study, the authors introduce a fusion of structural and textural features from two descriptors. The structural features are extracted from wavelet and curvelet transforms, whereas the textural features are extracted from different variants of local binary pattern operator. The proposed method is implemented on 200 images from dermoscopy database including 160 non‐melanoma and 40 melanoma images, where a rigorous statistical analysis for the database is performed. 
Using support vector machine (SVM) classifier with random sampling cross‐validation method between the three cases of skin lesions given in the database, the validated results showed a very encouraging performance with a sensitivity of 78.93%, a specificity of 93.25% and an accuracy of 86.07%. The proposed approach outperfor...","internal_url":"https://www.academia.edu/111609800/Fusion_of_structural_and_textural_features_for_melanoma_recognition","translated_internal_url":"","created_at":"2023-12-16T23:55:55.307-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098762,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098762/thumbnails/1.jpg","file_name":"paper3.pdf","download_url":"https://www.academia.edu/attachments/109098762/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fusion_of_structural_and_textural_featur.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098762/paper3-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DFusion_of_structural_and_textural_featur.pdf\u0026Expires=1733201481\u0026Signature=aqJHVsDbDdZfYXO4UYt7HgO1fWLWGM60OdKADjUzxARh5lVIFFfFh2tQiUFXI2BPEdFVud2VifwaaUXFJuDHSC9SO3wFrYxYPCFSN6SReIVI6Q1E6eqNvU14cmr40wb3sxUeHmQgeZmDliR3Np3J-d6DIMY5hJ7VvUgi8VNCNqeSbutUWacHQUEfa7htAyMtMvRYNYnf05jCsH-maZTWYmOOfLEoCcS4pvYAYu~ANsrUx0mxKwMoaWsX9NDHsRje-NRUzgkUjjenMchF9oH7YE-kfCsUbdL5TmDFl8MFS4L1Gcm6YlD4X1vL8srQGW41eH0uogcpaCMq6XAohCZm7g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Fusion_of_structural_and_textural_features_for_melanoma_recognition","translated_slug":"","page_count":7,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098762,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098762/thumbnails/1.jpg","file_name":"paper3.pdf","download_url":"https://www.academia.edu/attachments/109098762/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fusion_of_structural_and_textural_featur.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098762/paper3-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DFusion_of_structural_and_textural_featur.pdf\u0026Expires=1733201481\u0026Signature=aqJHVsDbDdZfYXO4UYt7HgO1fWLWGM60OdKADjUzxARh5lVIFFfFh2tQiUFXI2BPEdFVud2VifwaaUXFJuDHSC9SO3wFrYxYPCFSN6SReIVI6Q1E6eqNvU14cmr40wb3sxUeHmQgeZmDliR3Np3J-d6DIMY5hJ7VvUgi8VNCNqeSbutUWacHQUEfa7htAyMtMvRYNYnf05jCsH-maZTWYmOOfLEoCcS4pvYAYu~ANsrUx0mxKwMoaWsX9NDHsRje-NRUzgkUjjenMchF9oH7YE-kfCsUbdL5TmDFl8MFS4L1Gcm6YlD4X1vL8srQGW41eH0uogcpaCMq6XAohCZm7g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":4998,"name":"Medical Image 
Processing","url":"https://www.academia.edu/Documents/in/Medical_Image_Processing"},{"id":6021,"name":"Cancer","url":"https://www.academia.edu/Documents/in/Cancer"},{"id":10408,"name":"Support Vector Machines","url":"https://www.academia.edu/Documents/in/Support_Vector_Machines"},{"id":83038,"name":"Image fusion","url":"https://www.academia.edu/Documents/in/Image_fusion"},{"id":91365,"name":"Wavelet Transforms","url":"https://www.academia.edu/Documents/in/Wavelet_Transforms"},{"id":150094,"name":"Fusion","url":"https://www.academia.edu/Documents/in/Fusion"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction"},{"id":167397,"name":"Image recognition","url":"https://www.academia.edu/Documents/in/Image_recognition"},{"id":191289,"name":"Support vector machine","url":"https://www.academia.edu/Documents/in/Support_vector_machine"},{"id":403692,"name":"Curvelet","url":"https://www.academia.edu/Documents/in/Curvelet"},{"id":457105,"name":"Dermoscopy","url":"https://www.academia.edu/Documents/in/Dermoscopy"},{"id":1010893,"name":"Cancers","url":"https://www.academia.edu/Documents/in/Cancers"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering"},{"id":1372175,"name":"Textural Features","url":"https://www.academia.edu/Documents/in/Textural_Features"},{"id":1991646,"name":"Local Binary Patterns","url":"https://www.academia.edu/Documents/in/Local_Binary_Patterns"},{"id":2571968,"name":"Structural Features","url":"https://www.academia.edu/Documents/in/Structural_Features"}],"urls":[{"id":37229920,"url":"https://onlinelibrary.wiley.com/doi/pdf/10.1049/iet-cvi.2017.0193"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609798"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609798/Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies"><img alt="Research paper thumbnail of Towards improving the future of manufacturing through digital twin and augmented reality technologies" class="work-thumbnail" src="https://attachments.academia-assets.com/109098735/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609798/Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies">Towards improving the future of manufacturing through digital twin and augmented reality technologies</a></div><div class="wp-workCard_item"><span>Procedia Manufacturing</span><span>, 2018</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="154ba7982e0aa20d598f9569be034f28" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098735,&quot;asset_id&quot;:111609798,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" 
href="https://www.academia.edu/attachments/109098735/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609798"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609798"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609798; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609798]").text(description); $(".js-view-count[data-work-id=111609798]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609798; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609798']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609798, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "154ba7982e0aa20d598f9569be034f28" } } $('.js-work-strip[data-work-id=111609798]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609798,"title":"Towards improving the future of manufacturing through digital twin and augmented reality technologies","translated_title":"","metadata":{"publisher":"Elsevier BV","grobid_abstract":"Under the concept of \"Industry 4.0\", production processes will be pushed to be increasingly interconnected, information based on a real time basis and, necessarily, much more efficient. In this context, capacity optimization goes beyond the traditional aim of capacity maximization, contributing also for organization's profitability and value. Indeed, lean management and continuous improvement approaches suggest capacity optimization instead of maximization. The study of capacity optimization and costing models is an important research topic that deserves contributions from both the practical and theoretical perspectives. This paper presents and discusses a mathematical model for capacity management based on different costing models (ABC and TDABC). 
A generic model has been developed and it was used to analyze idle capacity and to design strategies towards the maximization of organization's value. The trade-off capacity maximization vs operational efficiency is highlighted and it is shown that capacity optimization might hide operational inefficiency.","publication_date":{"day":null,"month":null,"year":2018,"errors":{}},"publication_name":"Procedia Manufacturing","grobid_abstract_attachment_id":109098735},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609798/Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies","translated_internal_url":"","created_at":"2023-12-16T23:55:55.015-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098735,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098735/thumbnails/1.jpg","file_name":"LE2I_PROCMANUFACTURING_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098735/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Towards_improving_the_future_of_manufact.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098735/LE2I_PROCMANUFACTURING_2018_ABABSA-libre.pdf?1702800352=\u0026response-content-disposition=attachment%3B+filename%3DTowards_improving_the_future_of_manufact.pdf\u0026Expires=1733201481\u0026Signature=YXGiWDBEdUZ7EHV-yttnMDAFeKUc329CtV9lPWw9d-j75ONsgNC~mNx9Jl6yeADOYaIfqTmVXYBlkxP3VrqxonVDuLFEtr1lRVmD0ZrL4YvY3FfsHIK4EkcUhzs5v1Sb4Bv2h1GFHPSoipMSjfBrJfHJlcXF7bzllkfjEmiLEA2SuB4Tv4YJ6aRPb~-unqVC29W74rjPzHJJn140RYjEDgMG-cQbhVHZtyHmB8w9w~Qd6sIrtW5YAPJm9LhCx1Ckhfqd8xJtvPCkgtM43CWNkaU45tWh93a9NV2Z5Ytqajuoz7tN-cEFCdLtjkrWa9RGH~hcsIsIuuHWzK5GMnkfmw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies","translated_slug":"","page_count":9,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098735,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098735/thumbnails/1.jpg","file_name":"LE2I_PROCMANUFACTURING_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098735/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Towards_improving_the_future_of_manufact.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098735/LE2I_PROCMANUFACTURING_2018_ABABSA-libre.pdf?1702800352=\u0026response-content-disposition=attachment%3B+filename%3DTowards_improving_the_future_of_manufact.pdf\u0026Expires=1733201481\u0026Signature=YXGiWDBEdUZ7EHV-yttnMDAFeKUc329CtV9lPWw9d-j75ONsgNC~mNx9Jl6yeADOYaIfqTmVXYBlkxP3VrqxonVDuLFEtr1lRVmD0ZrL4YvY3FfsHIK4EkcUhzs5v1Sb4Bv2h1GFHPSoipMSjfBrJfHJlcXF7bzllkfjEmiLEA2SuB4Tv4YJ6aRPb~-unqVC29W74rjPzHJJn140RYjEDgMG-cQbhVHZtyHmB8w9w~Qd6sIrtW5YAPJm9LhCx1Ckhfqd8xJtvPCkgtM43CWNkaU45tWh93a9NV2Z5Ytqajuoz7tN-cEFCdLtjkrWa9RGH~hcsIsIuuHWzK5GMnkfmw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":26,"name":"Business","url":"https://www.academia.edu/Documents/in/Business"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":8910,"name":"Evaluation","url":"https://www.academia.edu/Documents/in/Evaluation"},{"id":61714,"name":"Production economics","url":"https://www.academia.edu/Documents/in/Production_economics"},{"id":66379,"name":"Automation","url":"https://www.academia.edu/Documents/in/Automation"},{"id":85280,"name":"Industry","url":"https://www.academia.edu/Documents/in/Industry"},{"id":167727,"name":"Industrial Revolution","url":"https://www.academia.edu/Documents/in/Industrial_Revolution"},{"id":511079,"name":"Predictive Maintenance","url":"https://www.academia.edu/Documents/in/Predictive_Maintenance"},{"id":2741797,"name":"Digital Twin","url":"https://www.academia.edu/Documents/in/Digital_Twin"}],"urls":[{"id":37229918,"url":"https://api.elsevier.com/content/article/PII:S2351978918311867?httpAccept=text/xml"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609797"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609797/A_Non_rigid_Face_Tracking_Method_for_Wide_Rotation_Using_Synthetic_Data"><img alt="Research paper thumbnail of A Non-rigid Face Tracking Method for Wide Rotation Using Synthetic Data" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609797/A_Non_rigid_Face_Tracking_Method_for_Wide_Rotation_Using_Synthetic_Data">A Non-rigid Face Tracking Method for Wide Rotation Using Synthetic Data</a></div><div class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">This paper propose a new method for wide-rotation non-rigid face tracking that is still a challen...</span><a class="js-work-more-abstract" 
A Non-rigid Face Tracking Method for Wide Rotation Using Synthetic Data
Lecture Notes in Computer Science, 2015
Abstract: This paper proposes a new method for wide-rotation non-rigid face tracking, which is still a challenging problem in the computer vision community. Our method consists of training and tracking phases. In training, we propose to use a large off-line synthetic database to overcome the problem of data collection. The local appearance models are then trained using a linear support vector machine (SVM). In tracking, we propose a two-step approach: (i) the first step uses baseline matching for a good initialization, and the matching strategy between the current frame and a set of adaptive keyframes also allows recovery from failed tracking; (ii) the second step estimates the model parameters using a heuristic method via pose-wise SVMs. The combination makes our approach work robustly with wide rotations, up to 90° about the vertical axis. In addition, our method appears to be robust even in the presence of fast movements thanks to baseline matching. Compared to state-of-the-art methods, our method shows a good compromise between rigid and non-rigid parameter accuracies. This study gives a promising perspective because of the good results in terms of pose estimation (average error below 4° on the BUFT dataset) and landmark tracking precision (5.8-pixel error compared to 6.8 for one state-of-the-art method on the Talking Face video). These results highlight the potential of using synthetic data to track non-rigid faces in unconstrained poses.
Topics: Computer Science; Artificial Intelligence; Computer Vision
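To make the "pose-wise linear SVMs over local appearance" idea concrete, here is a small sketch under simple assumptions (HOG patch descriptors, one linear SVM per pose bin); patch extraction, pose binning and the synthetic renders are placeholders rather than the paper's training procedure.

```python
# Pose-wise linear SVMs scoring local appearance patches around candidate landmarks (sketch).
import numpy as np
from skimage.feature import hog
from sklearn.svm import LinearSVC

def patch_descriptor(patch):
    """HOG descriptor of a small grayscale patch around a candidate landmark."""
    return hog(patch, pixels_per_cell=(8, 8), cells_per_block=(2, 2))

def train_pose_wise_svms(patches_by_pose):
    """patches_by_pose: {pose_bin: (patches, 0/1 labels)} built from synthetic renders (hypothetical)."""
    models = {}
    for pose_bin, (patches, labels) in patches_by_pose.items():
        X = np.array([patch_descriptor(p) for p in patches])
        models[pose_bin] = LinearSVC().fit(X, labels)
    return models

def score_candidate(models, pose_bin, patch):
    """Signed distance to the hyperplane: higher means a more landmark-like patch."""
    return models[pose_bin].decision_function([patch_descriptor(patch)])[0]
```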
profile-work-strip-edit-button-wrapper" data-work-id="111609796"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609796"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609796; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609796]").text(description); $(".js-view-count[data-work-id=111609796]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609796; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609796']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609796, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "4df3f8ee5842f036fc71a74263db4bd6" } } $('.js-work-strip[data-work-id=111609796]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609796,"title":"A Depth-based Approach for 3D Dynamic Gesture Recognition","translated_title":"","metadata":{"publisher":"SCITEPRESS - Science and and Technology Publications","grobid_abstract":"In this paper we propose a recognition technique of 3D dynamic gesture for human robot interaction (HRI) based on depth information provided by Kinect sensor. The body is tracked using the skeleton algorithm provided by the Kinect SDK. The main idea of this work is to compute the angles of the upper body joints which are active when executing gesture. The variation of these angles are used as inputs of Hidden Markov Models (HMM) in order to recognize the dynamic gestures. 
Results demonstrate the robustness of our method against environmental conditions such as illumination changes and scene complexity due to using depth information only.","publication_date":{"day":null,"month":null,"year":2015,"errors":{}},"publication_name":"Proceedings of the 12th International Conference on Informatics in Control, Automation and Robotics","grobid_abstract_attachment_id":109098734},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609796/A_Depth_based_Approach_for_3D_Dynamic_Gesture_Recognition","translated_internal_url":"","created_at":"2023-12-16T23:55:54.710-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098734,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098734/thumbnails/1.jpg","file_name":"Hajar_HIYADI_ICINCO.pdf","download_url":"https://www.academia.edu/attachments/109098734/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"A_Depth_based_Approach_for_3D_Dynamic_Ge.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098734/Hajar_HIYADI_ICINCO-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DA_Depth_based_Approach_for_3D_Dynamic_Ge.pdf\u0026Expires=1733201481\u0026Signature=L~kPLqozZvyb5RVLtdAXeSwUNTUjErE3ait5vWmWdT0LqCduGPcCWBF~JGJh~U5KGe-iYaBkRFIVmgDY93puOGZxISqotzbs3lfIqPi7BFbYj-coOlhow6M0Bh~xUBAF154hwQLcsbyxzEk1zSNcxI0MIOQ4YPk0Lj5xzeoIzzjLsV1JhIPGb6B9VXFr1QeQmZCmB5ggYDJiTVlAdOoiEIBY0Pp9ddDnMUQUr-e3DnO-hCPjqXfr9p-40QPV9hHOPMnU~pSozFS7F7cwWbiBe8U6dLcdn7w8rQeWBhfJAgS8EI9P2NWnHBMpUSM4mcQ-RtnQmLJULyiIamzkEx-lxw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"A_Depth_based_Approach_for_3D_Dynamic_Gesture_Recognition","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098734,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098734/thumbnails/1.jpg","file_name":"Hajar_HIYADI_ICINCO.pdf","download_url":"https://www.academia.edu/attachments/109098734/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"A_Depth_based_Approach_for_3D_Dynamic_Ge.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098734/Hajar_HIYADI_ICINCO-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DA_Depth_based_Approach_for_3D_Dynamic_Ge.pdf\u0026Expires=1733201481\u0026Signature=L~kPLqozZvyb5RVLtdAXeSwUNTUjErE3ait5vWmWdT0LqCduGPcCWBF~JGJh~U5KGe-iYaBkRFIVmgDY93puOGZxISqotzbs3lfIqPi7BFbYj-coOlhow6M0Bh~xUBAF154hwQLcsbyxzEk1zSNcxI0MIOQ4YPk0Lj5xzeoIzzjLsV1JhIPGb6B9VXFr1QeQmZCmB5ggYDJiTVlAdOoiEIBY0Pp9ddDnMUQUr-e3DnO-hCPjqXfr9p-40QPV9hHOPMnU~pSozFS7F7cwWbiBe8U6dLcdn7w8rQeWBhfJAgS8EI9P2NWnHBMpUSM4mcQ-RtnQmLJULyiIamzkEx-lxw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":854,"name":"Computer 
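A common way to realize the scheme above is one HMM per gesture class over joint-angle sequences, with recognition by maximum log-likelihood. The sketch below assumes the third-party hmmlearn package and leaves angle extraction from the Kinect skeleton abstract; it is not the authors' code.

```python
# Per-gesture Gaussian HMMs over joint-angle sequences (sketch; hmmlearn assumed available).
import numpy as np
from hmmlearn.hmm import GaussianHMM

def train_gesture_hmms(sequences_by_gesture, n_states=5):
    """sequences_by_gesture: {gesture_name: list of (T_i, n_angles) joint-angle arrays}."""
    models = {}
    for name, seqs in sequences_by_gesture.items():
        X = np.concatenate(seqs)               # stacked observations from all sequences
        lengths = [len(s) for s in seqs]       # per-sequence lengths for the HMM fit
        models[name] = GaussianHMM(n_components=n_states).fit(X, lengths)
    return models

def recognize(models, angle_sequence):
    """Return the gesture whose HMM gives the highest log-likelihood for the observed sequence."""
    return max(models, key=lambda name: models[name].score(angle_sequence))
```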
Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision"},{"id":3147,"name":"Gesture","url":"https://www.academia.edu/Documents/in/Gesture"},{"id":17701,"name":"Gesture Recognition","url":"https://www.academia.edu/Documents/in/Gesture_Recognition"},{"id":143539,"name":"hidden Markov model","url":"https://www.academia.edu/Documents/in/hidden_Markov_model"},{"id":210122,"name":"Robustness (evolution)","url":"https://www.academia.edu/Documents/in/Robustness_evolution_"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="17423315" id="papers"><div class="js-work-strip profile--work_container" data-work-id="111609821"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609821/Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set"><img alt="Research paper thumbnail of Challenging 3D Head Tracking and Evaluation Using Unconstrained Test Data Set" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609821/Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set">Challenging 3D Head Tracking and Evaluation Using Unconstrained Test Data Set</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">3D face tracking using one monocular camera is an important topic, since it is useful in many dom...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">3D face tracking using one monocular camera is an important topic, since it is useful in many domains such as: video surveillance system, human machine interaction, biometrics, etc. In this paper, we propose a new 3D face tracking which is robust to large head rotations. Underlying cascaded regression approach for 2D landmark detection, we build an extension in context of 3D pose tracking. To better work with out-of-plane issues, we extend the training dataset by including a new set of synthetic images. For evaluation, we propose to use a new recording system to capture automatically face pose ground-truth, and create a new test dataset, named U3PT (Unconstrained 3D Pose Tracking). 
Theperformance of our method along with the state-of-the-art methods are carried out to analyze advantage as well as limitations need to be improved in the future.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609821"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609821"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609821; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609821]").text(description); $(".js-view-count[data-work-id=111609821]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609821; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609821']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609821, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=111609821]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609821,"title":"Challenging 3D Head Tracking and Evaluation Using Unconstrained Test Data Set","translated_title":"","metadata":{"abstract":"3D face tracking using one monocular camera is an important topic, since it is useful in many domains such as: video surveillance system, human machine interaction, biometrics, etc. In this paper, we propose a new 3D face tracking which is robust to large head rotations. Underlying cascaded regression approach for 2D landmark detection, we build an extension in context of 3D pose tracking. To better work with out-of-plane issues, we extend the training dataset by including a new set of synthetic images. For evaluation, we propose to use a new recording system to capture automatically face pose ground-truth, and create a new test dataset, named U3PT (Unconstrained 3D Pose Tracking). 
Theperformance of our method along with the state-of-the-art methods are carried out to analyze advantage as well as limitations need to be improved in the future.","publication_date":{"day":1,"month":7,"year":2017,"errors":{}}},"translated_abstract":"3D face tracking using one monocular camera is an important topic, since it is useful in many domains such as: video surveillance system, human machine interaction, biometrics, etc. In this paper, we propose a new 3D face tracking which is robust to large head rotations. Underlying cascaded regression approach for 2D landmark detection, we build an extension in context of 3D pose tracking. To better work with out-of-plane issues, we extend the training dataset by including a new set of synthetic images. For evaluation, we propose to use a new recording system to capture automatically face pose ground-truth, and create a new test dataset, named U3PT (Unconstrained 3D Pose Tracking). Theperformance of our method along with the state-of-the-art methods are carried out to analyze advantage as well as limitations need to be improved in the future.","internal_url":"https://www.academia.edu/111609821/Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set","translated_internal_url":"","created_at":"2023-12-16T23:55:59.827-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[],"slug":"Challenging_3D_Head_Tracking_and_Evaluation_Using_Unconstrained_Test_Data_Set","translated_slug":"","page_count":null,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision"},{"id":9173,"name":"Biometrics","url":"https://www.academia.edu/Documents/in/Biometrics"},{"id":822358,"name":"Ground Truth","url":"https://www.academia.edu/Documents/in/Ground_Truth"}],"urls":[{"id":37229940,"url":"https://doi.org/10.1109/iv.2017.40"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609820"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609820/Shape_related_constraints_aware_generation_of_Mechanical_Designs_through_Deep_Convolutional_GAN"><img alt="Research paper thumbnail of Shape related constraints aware generation of Mechanical Designs through Deep Convolutional GAN" class="work-thumbnail" src="https://attachments.academia-assets.com/109098709/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" 
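The step from 2D landmarks to a 3D head pose can be pictured with a standard building block that is not specific to this paper: solving a PnP problem against a generic 3D face model. The sketch below uses OpenCV for that step; the model points, the pinhole intrinsics and the landmark ordering are placeholder assumptions, and the paper's own cascaded-regression tracker is not reproduced here.

```python
# Illustrative sketch (not the paper's tracker): estimating a 3D head pose
# from 2D facial landmarks via PnP against a generic 3D face model, with OpenCV.
import numpy as np
import cv2

# Rough 3D positions (mm) of a few landmarks on a generic head model (placeholders).
MODEL_POINTS = np.array([
    (0.0, 0.0, 0.0),           # nose tip
    (0.0, -330.0, -65.0),      # chin
    (-225.0, 170.0, -135.0),   # left eye outer corner
    (225.0, 170.0, -135.0),    # right eye outer corner
    (-150.0, -150.0, -125.0),  # left mouth corner
    (150.0, -150.0, -125.0),   # right mouth corner
], dtype=np.float64)


def head_pose(landmarks_2d, image_size):
    """landmarks_2d: 6x2 array ordered like MODEL_POINTS; returns rotation/translation."""
    h, w = image_size
    focal = w  # crude pinhole approximation of the focal length
    camera_matrix = np.array([[focal, 0, w / 2],
                              [0, focal, h / 2],
                              [0, 0, 1]], dtype=np.float64)
    dist_coeffs = np.zeros((4, 1))  # assume no lens distortion
    ok, rvec, tvec = cv2.solvePnP(MODEL_POINTS,
                                  np.asarray(landmarks_2d, dtype=np.float64),
                                  camera_matrix, dist_coeffs,
                                  flags=cv2.SOLVEPNP_ITERATIVE)
    return ok, rvec, tvec  # rvec: Rodrigues rotation vector, tvec: translation
```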
href="https://www.academia.edu/111609820/Shape_related_constraints_aware_generation_of_Mechanical_Designs_through_Deep_Convolutional_GAN">Shape related constraints aware generation of Mechanical Designs through Deep Convolutional GAN</a></div><div class="wp-workCard_item"><span>arXiv (Cornell University)</span><span>, Oct 22, 2020</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="f71406dfe6546091ea56a2310a847033" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098709,&quot;asset_id&quot;:111609820,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098709/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609820"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609820"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609820; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609820]").text(description); $(".js-view-count[data-work-id=111609820]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609820; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609820']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609820, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "f71406dfe6546091ea56a2310a847033" } } $('.js-work-strip[data-work-id=111609820]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609820,"title":"Shape related constraints aware generation of Mechanical Designs through Deep Convolutional GAN","translated_title":"","metadata":{"publisher":"Cornell 
University","grobid_abstract":"Mechanical product engineering often must comply with manufacturing or geometric constraints related to the shaping process. Mechanical design hence should rely on robust and fast tools to explore complex shapes, typically for design for additive manufacturing (DfAM). Topology optimization is such a powerful tool, yet integrating geometric constraints (shape-related) into it is hard. In this work, we leverage machine learning capability to handle complex geometric and spatial correlations to integrate into the mechanical design process geometry-related constraints at the conceptual level. More precisely, we explore the generative capabilities of recent Deep Learning architectures to enhance mechanical designs, typically for additive manufacturing. In this work, we build a generative Deep-Learning-based approach of topology optimization integrating mechanical conditions in addition to one typical manufacturing condition (the complexity of a design i.e. a geometrical condition). The approach is a dual-discriminator GAN: a generator that takes as input the mechanical and geometrical conditions and outputs a 2D structure and two discriminators, one to ensure that the generated structure follows the mechanical constraints and the other to assess the geometrical constraint. We also explore the generation of designs with a non-uniform material distribution and show promising results.","publication_date":{"day":22,"month":10,"year":2020,"errors":{}},"publication_name":"arXiv (Cornell University)","grobid_abstract_attachment_id":109098709},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609820/Shape_related_constraints_aware_generation_of_Mechanical_Designs_through_Deep_Convolutional_GAN","translated_internal_url":"","created_at":"2023-12-16T23:55:59.606-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098709,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098709/thumbnails/1.jpg","file_name":"2010.11833.pdf","download_url":"https://www.academia.edu/attachments/109098709/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Shape_related_constraints_aware_generati.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098709/2010.11833-libre.pdf?1702800376=\u0026response-content-disposition=attachment%3B+filename%3DShape_related_constraints_aware_generati.pdf\u0026Expires=1733201480\u0026Signature=GtX9gdbrDRRxKBy1Hyj~mDFYQ1JkVXXMvP~NW6HMiTy9anmQsIGorin57E4n906UdlAEgceBdmEyAQgsDKEZUdNbu5Esod6oPegFEbq6uHXtE7TRUS7VE53s8RDgqoYq7GWPrYYp5D1FH5sqEZ288k-ncbsJVhXUta74LXRC2mIxhPrj8MtmZhnTeBGCNRqJkpeJHErhrC4sV0wq8pR8LRMQbSTjtjARdA1WUxeJEPYhm~r7aVWa2zox0~vBg~4dg92ThwYDMr3FNo2m1qNStURGr5jUv3o-omuxinPlZh1JAapMJVp2fJsjhkZW6iRpFp81JLB45Lv0drU142WFlw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Shape_related_constraints_aware_generation_of_Mechanical_Designs_through_Deep_Convolutional_GAN","translated_slug":"","page_count":55,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098709,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098709/thumbnails/1.jpg","file_name":"2010.11833.pdf","download_url":"https://www.academia.edu/attachments/109098709/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Shape_related_constraints_aware_generati.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098709/2010.11833-libre.pdf?1702800376=\u0026response-content-disposition=attachment%3B+filename%3DShape_related_constraints_aware_generati.pdf\u0026Expires=1733201480\u0026Signature=GtX9gdbrDRRxKBy1Hyj~mDFYQ1JkVXXMvP~NW6HMiTy9anmQsIGorin57E4n906UdlAEgceBdmEyAQgsDKEZUdNbu5Esod6oPegFEbq6uHXtE7TRUS7VE53s8RDgqoYq7GWPrYYp5D1FH5sqEZ288k-ncbsJVhXUta74LXRC2mIxhPrj8MtmZhnTeBGCNRqJkpeJHErhrC4sV0wq8pR8LRMQbSTjtjARdA1WUxeJEPYhm~r7aVWa2zox0~vBg~4dg92ThwYDMr3FNo2m1qNStURGr5jUv3o-omuxinPlZh1JAapMJVp2fJsjhkZW6iRpFp81JLB45Lv0drU142WFlw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":20097,"name":"Topology Optimization","url":"https://www.academia.edu/Documents/in/Topology_Optimization"},{"id":3193313,"name":"arXiv","url":"https://www.academia.edu/Documents/in/arXiv"}],"urls":[{"id":37229938,"url":"http://arxiv.org/pdf/2010.11833"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609818"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609818/A_Simple_Human_Activity_Recognition_Technique_Using_DCT"><img alt="Research paper thumbnail of A Simple Human Activity Recognition Technique Using DCT" class="work-thumbnail" src="https://attachments.academia-assets.com/109098693/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609818/A_Simple_Human_Activity_Recognition_Technique_Using_DCT">A Simple Human Activity Recognition Technique Using DCT</a></div><div class="wp-workCard_item"><span>Advanced Concepts for Intelligent Vision Systems</span><span>, 2016</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="da65baa95a9ce7e6189c19f9fb2160ca" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098693,&quot;asset_id&quot;:111609818,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098693/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609818"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa 
fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609818"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609818; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609818]").text(description); $(".js-view-count[data-work-id=111609818]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609818; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609818']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609818, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "da65baa95a9ce7e6189c19f9fb2160ca" } } $('.js-work-strip[data-work-id=111609818]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609818,"title":"A Simple Human Activity Recognition Technique Using DCT","translated_title":"","metadata":{"publisher":"Springer International Publishing","grobid_abstract":"In this paper, we present a simple new human activity recognition method using discrete cosine transform (DCT). The scheme uses the DCT coefficients extracted from silhouettes as descriptors (features) and performs frame-by-frame recognition, which make it simple and suitable for real time applications. 
We carried out several tests using radial basis neural network (RBF) for classification, a comparative study against stat-of-the-art methods shows that our technique is faster, simple and gives higher accuracy performance comparing to discrete transform based techniques and other methods proposed in literature.","publication_date":{"day":null,"month":null,"year":2016,"errors":{}},"publication_name":"Advanced Concepts for Intelligent Vision Systems","grobid_abstract_attachment_id":109098693},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609818/A_Simple_Human_Activity_Recognition_Technique_Using_DCT","translated_internal_url":"","created_at":"2023-12-16T23:55:59.363-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098693,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098693/thumbnails/1.jpg","file_name":"978-3-319-48680-2_4.pdf","download_url":"https://www.academia.edu/attachments/109098693/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"A_Simple_Human_Activity_Recognition_Tech.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098693/978-3-319-48680-2_4-libre.pdf?1702800351=\u0026response-content-disposition=attachment%3B+filename%3DA_Simple_Human_Activity_Recognition_Tech.pdf\u0026Expires=1733201480\u0026Signature=EQt6B6FUsuPHteYqGUdSVSZIszIXjIQCwtKa8yLvX3VGd6QYthB5RJDB4nO0Zswe1bAEacBdr~V7WkmpRAvIB2DEEICukxAGiON7q0p~kAsvLXaFFV07yL1xA7THgYvRooQHc0JIbP9C85YWDhgGWDX~70MTZJibSGHvQypsefIELyTcIzz6Yw~PSTYp3lCCg7s3V4aygBEJtfmAOFB0hTPh5G3AA~F48RLu3Pg3lL9N1u74z1uIjeOdMc-O~LG7WE251NfcFaShrE9uzDNGnzPCYxYxZHQRkcTxrNxryfGpyTj7fffvx3tdC7FEzEMd-od~-Q4wkGVwB1HxEtyfnA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"A_Simple_Human_Activity_Recognition_Technique_Using_DCT","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
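The descriptor described here is easy to prototype. The sketch below, an illustration rather than the authors' implementation, computes a separable 2D DCT of a binary silhouette with SciPy and keeps the top-left low-frequency block as the per-frame feature vector; the block size k and the choice of SciPy are assumptions, and the RBF classifier is left out.

```python
# Illustration only (assumed pipeline): low-frequency 2D DCT coefficients of a
# binary silhouette as a per-frame feature vector for activity recognition.
import numpy as np
from scipy.fftpack import dct


def dct2(block):
    """Separable 2D DCT-II with orthonormal scaling (rows, then columns)."""
    return dct(dct(block.T, norm="ortho").T, norm="ortho")


def silhouette_features(silhouette, k=8):
    """Keep the k x k top-left (low-frequency) DCT coefficients as the descriptor."""
    sil = np.asarray(silhouette, dtype=np.float64)
    coeffs = dct2(sil)
    return coeffs[:k, :k].ravel()
```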
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098693,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098693/thumbnails/1.jpg","file_name":"978-3-319-48680-2_4.pdf","download_url":"https://www.academia.edu/attachments/109098693/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"A_Simple_Human_Activity_Recognition_Tech.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098693/978-3-319-48680-2_4-libre.pdf?1702800351=\u0026response-content-disposition=attachment%3B+filename%3DA_Simple_Human_Activity_Recognition_Tech.pdf\u0026Expires=1733201480\u0026Signature=EQt6B6FUsuPHteYqGUdSVSZIszIXjIQCwtKa8yLvX3VGd6QYthB5RJDB4nO0Zswe1bAEacBdr~V7WkmpRAvIB2DEEICukxAGiON7q0p~kAsvLXaFFV07yL1xA7THgYvRooQHc0JIbP9C85YWDhgGWDX~70MTZJibSGHvQypsefIELyTcIzz6Yw~PSTYp3lCCg7s3V4aygBEJtfmAOFB0hTPh5G3AA~F48RLu3Pg3lL9N1u74z1uIjeOdMc-O~LG7WE251NfcFaShrE9uzDNGnzPCYxYxZHQRkcTxrNxryfGpyTj7fffvx3tdC7FEzEMd-od~-Q4wkGVwB1HxEtyfnA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098702,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098702/thumbnails/1.jpg","file_name":"978-3-319-48680-2_4.pdf","download_url":"https://www.academia.edu/attachments/109098702/download_file","bulk_download_file_name":"A_Simple_Human_Activity_Recognition_Tech.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098702/978-3-319-48680-2_4-libre.pdf?1702800351=\u0026response-content-disposition=attachment%3B+filename%3DA_Simple_Human_Activity_Recognition_Tech.pdf\u0026Expires=1733201480\u0026Signature=HIOGJZGk33EDofYYcston5NHrqqknsS4X0LmJujQLBwCcLK6yYK1TFKoK-WtnRyYmkVPxnsdV5QciGESe5~NrlPt1eclMavANOZe~1zIbjaVswUUym0ITGO0DJwCCsvcB-f1F8WsTSYXLS7O4XOKQ7FYWurnM60i8zrjq-nm8loGJixck1NgD6juSqn9MSgloRdd-Zd~4RekYP2P3lXqzh~9pUirI6a4xbQIknKg65CPMP2dyx-fVk8sItMd6g26rVBoSX1JasJ5Tz0T5QusgpUYSFwe5UIVElbCWpdGxeMX4D8KHyvMAwqp4cVVPBfFhx3b0dCa2d~VtHhwbyjpWQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":185596,"name":"Discrete Cosine Transform","url":"https://www.academia.edu/Documents/in/Discrete_Cosine_Transform"}],"urls":[{"id":37229937,"url":"http://link.springer.com/content/pdf/10.1007/978-3-319-48680-2_4"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609817"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609817/Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case"><img alt="Research paper thumbnail of Evaluating Added Value of Augmented Reality to Assist Aeronautical Maintenance Workers—Experimentation on On-field Use Case" class="work-thumbnail" src="https://attachments.academia-assets.com/109098772/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" 
href="https://www.academia.edu/111609817/Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case">Evaluating Added Value of Augmented Reality to Assist Aeronautical Maintenance Workers—Experimentation on On-field Use Case</a></div><div class="wp-workCard_item"><span>Virtual Reality and Augmented Reality</span><span>, 2019</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="8ba014367da6135276fead4ce09a5cc0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098772,&quot;asset_id&quot;:111609817,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098772/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609817"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609817"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609817; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609817]").text(description); $(".js-view-count[data-work-id=111609817]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609817; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609817']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609817, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "8ba014367da6135276fead4ce09a5cc0" } } $('.js-work-strip[data-work-id=111609817]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609817,"title":"Evaluating Added Value of Augmented Reality to Assist Aeronautical Maintenance Workers—Experimentation 
on On-field Use Case","translated_title":"","metadata":{"publisher":"Springer International Publishing","grobid_abstract":"Augmented Reality (AR) technology facilitates interactions with information and understanding of complex situations. Aeronautical Maintenance combines complexity induced by the variety of products and constraints associated to aeronautic sector and the environment of maintenance. AR tools seem well indicated to solve constraints of productivity and quality on the aeronautical maintenance activities by simplifying data interactions for the workers. However, few evaluations of AR have been done in real processes due to the difficulty of integrating the technology without proper tools for deployment and assessing the results. This paper proposes a method to select suitable criteria for AR evaluation in industrial environment and to deploy AR solutions suited to assist maintenance workers. These are used to set up on-field experiments that demonstrate benefits of AR on process and user point of view for different profiles of workers. Further work will consist on using these elements to extend results to AR evaluation on the whole aeronautical maintenance process. A classification of maintenance activities linked to workers specific needs will lead to prediction of the value that augmented reality would bring to each activity.","publication_date":{"day":null,"month":null,"year":2019,"errors":{}},"publication_name":"Virtual Reality and Augmented Reality","grobid_abstract_attachment_id":109098772},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609817/Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case","translated_internal_url":"","created_at":"2023-12-16T23:55:59.149-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098772,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098772/thumbnails/1.jpg","file_name":"LISPEN_EUROVR_2019_LOIZEAU.pdf","download_url":"https://www.academia.edu/attachments/109098772/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Evaluating_Added_Value_of_Augmented_Real.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098772/LISPEN_EUROVR_2019_LOIZEAU-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DEvaluating_Added_Value_of_Augmented_Real.pdf\u0026Expires=1733201480\u0026Signature=gakZpyn5rTY4nRAog5fDUMKClcaeZtdEO1JdU80p4w3YdTklTBWZdpDmLj1nFMlHHZjoGtN5ZFG-UpXlu47wHpMxvMjLQs9f1qXAanUDjE~acwvQA5jOfPEXJuWhPX0sF95oSAzAWdBrDIn4lnRVKLWtUDoOJW9a2DMV5HMOgRKQW-~hDnmRmYs4f5EV3yXYw5Ws34WjeChOyJr7AEpB2-zkk5TgTxGaqXWaLXBJkwbqkaS3I1~-mmsiV1R7DujImxOAmzhBomTYCrcS3-fAWI8Of9HjQAraefiiJhQLxJcrF~oCBnvSLL0p72NowjXF17xsLTaZ3oVXZVD6TndNvg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Evaluating_Added_Value_of_Augmented_Reality_to_Assist_Aeronautical_Maintenance_Workers_Experimentation_on_On_field_Use_Case","translated_slug":"","page_count":20,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098772,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098772/thumbnails/1.jpg","file_name":"LISPEN_EUROVR_2019_LOIZEAU.pdf","download_url":"https://www.academia.edu/attachments/109098772/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Evaluating_Added_Value_of_Augmented_Real.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098772/LISPEN_EUROVR_2019_LOIZEAU-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DEvaluating_Added_Value_of_Augmented_Real.pdf\u0026Expires=1733201480\u0026Signature=gakZpyn5rTY4nRAog5fDUMKClcaeZtdEO1JdU80p4w3YdTklTBWZdpDmLj1nFMlHHZjoGtN5ZFG-UpXlu47wHpMxvMjLQs9f1qXAanUDjE~acwvQA5jOfPEXJuWhPX0sF95oSAzAWdBrDIn4lnRVKLWtUDoOJW9a2DMV5HMOgRKQW-~hDnmRmYs4f5EV3yXYw5Ws34WjeChOyJr7AEpB2-zkk5TgTxGaqXWaLXBJkwbqkaS3I1~-mmsiV1R7DujImxOAmzhBomTYCrcS3-fAWI8Of9HjQAraefiiJhQLxJcrF~oCBnvSLL0p72NowjXF17xsLTaZ3oVXZVD6TndNvg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":345840,"name":"Criteria","url":"https://www.academia.edu/Documents/in/Criteria"},{"id":453754,"name":"Augmented and Virtual Reality for Civil and Building Engineering","url":"https://www.academia.edu/Documents/in/Augmented_and_Virtual_Reality_for_Civil_and_Building_Engineering"},{"id":783265,"name":"Added Value","url":"https://www.academia.edu/Documents/in/Added_Value"},{"id":1268642,"name":"Software Deployment","url":"https://www.academia.edu/Documents/in/Software_Deployment"}],"urls":[{"id":37229935,"url":"http://link.springer.com/content/pdf/10.1007/978-3-030-31908-3_10"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609816"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609816/Pedestrian_Using_Catadioptric_Sensor_12"><img alt="Research paper thumbnail of Pedestrian Using Catadioptric Sensor 12" class="work-thumbnail" src="https://attachments.academia-assets.com/109098687/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609816/Pedestrian_Using_Catadioptric_Sensor_12">Pedestrian Using Catadioptric Sensor 12</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We investigate the detection of person in the omnidirectional images, adopting a linear SVM. We h...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We investigate the detection of person in the omnidirectional images, adopting a linear SVM. We have implemented HOG-based descriptors, for omnidirectional and spherical images. 
In this paper we studied the influence of each parameter in our algorithm on the performances of person detections in catadioptric images. However, few studies have elaborated the problem of human detection using this type of cameras; therefore we have set up our own test base. Our results show that our detector can robustly detect people in omnidirectional images, as soon as the algorithm is adapted to the distortions introduced by the use of the omnidirectional camera.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="2d08d36df00d97618e86dc94747af02f" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098687,&quot;asset_id&quot;:111609816,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098687/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609816"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609816"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609816; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609816]").text(description); $(".js-view-count[data-work-id=111609816]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609816; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609816']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609816, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "2d08d36df00d97618e86dc94747af02f" } } $('.js-work-strip[data-work-id=111609816]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609816,"title":"Pedestrian Using 
Catadioptric Sensor 12","translated_title":"","metadata":{"abstract":"We investigate the detection of person in the omnidirectional images, adopting a linear SVM. We have implemented HOG-based descriptors, for omnidirectional and spherical images. In this paper we studied the influence of each parameter in our algorithm on the performances of person detections in catadioptric images. However, few studies have elaborated the problem of human detection using this type of cameras; therefore we have set up our own test base. Our results show that our detector can robustly detect people in omnidirectional images, as soon as the algorithm is adapted to the distortions introduced by the use of the omnidirectional camera.","publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":"We investigate the detection of person in the omnidirectional images, adopting a linear SVM. We have implemented HOG-based descriptors, for omnidirectional and spherical images. In this paper we studied the influence of each parameter in our algorithm on the performances of person detections in catadioptric images. However, few studies have elaborated the problem of human detection using this type of cameras; therefore we have set up our own test base. Our results show that our detector can robustly detect people in omnidirectional images, as soon as the algorithm is adapted to the distortions introduced by the use of the omnidirectional camera.","internal_url":"https://www.academia.edu/111609816/Pedestrian_Using_Catadioptric_Sensor_12","translated_internal_url":"","created_at":"2023-12-16T23:55:58.948-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098687,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098687/thumbnails/1.jpg","file_name":"2Vol96No8.pdf","download_url":"https://www.academia.edu/attachments/109098687/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Pedestrian_Using_Catadioptric_Sensor_12.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098687/2Vol96No8-libre.pdf?1702800377=\u0026response-content-disposition=attachment%3B+filename%3DPedestrian_Using_Catadioptric_Sensor_12.pdf\u0026Expires=1733201480\u0026Signature=W5txYwR5ZbQtC1qDbXWQIV6Ib6eEONivSh6oBhsCAAXRopWCVdFiCv42~EZya3xYxAJEa3vgpwDwYBUCE-n22p0gWZybuBW7VkismYfk3Z3M5r3FcUYLxC441MtTsbxynDuvlw1CJPnxOSTL-rz0XKgpPkXPEUUvps3KI44DXd9h3nJi4DNT86TyjIz7WUAjTP~uwqIvIZKPEl5U4yWfMmjhPsnHdRORtbPNFp8FIINVUcqzzwFmYPuRNYZByLr7WqE5Z1yIFxCKTYhEp8hD-Hj9WGv0xuCA3r7vtCM23vBmlYmN6J~nEnP~HKDOBXNg9FSWJB0rwlZAH6vsBEadEQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Pedestrian_Using_Catadioptric_Sensor_12","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
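For readers unfamiliar with the baseline this abstract builds on, the sketch below shows the generic HOG plus linear-SVM detection pipeline, assuming scikit-image and scikit-learn; the window size, HOG parameters and SVM regularization are placeholder choices, and the paper's actual contribution, adapting the descriptor to catadioptric distortions, is not reproduced here.

```python
# Sketch under stated assumptions: generic HOG descriptor + linear SVM classifier
# for person detection on fixed-size grayscale windows.
import numpy as np
from skimage.feature import hog
from sklearn.svm import LinearSVC


def hog_descriptor(window):
    """window: grayscale patch at the detection window size (e.g. 128x64)."""
    return hog(window, orientations=9, pixels_per_cell=(8, 8),
               cells_per_block=(2, 2), block_norm="L2-Hys")


def train_detector(pos_windows, neg_windows):
    """pos_windows / neg_windows: lists of person / background patches."""
    X = np.array([hog_descriptor(w) for w in pos_windows + neg_windows])
    y = np.array([1] * len(pos_windows) + [0] * len(neg_windows))
    clf = LinearSVC(C=0.01)
    clf.fit(X, y)
    return clf


def is_person(clf, window):
    """Positive SVM margin means the window is classified as a person."""
    return clf.decision_function([hog_descriptor(window)])[0] > 0
```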
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098687,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098687/thumbnails/1.jpg","file_name":"2Vol96No8.pdf","download_url":"https://www.academia.edu/attachments/109098687/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Pedestrian_Using_Catadioptric_Sensor_12.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098687/2Vol96No8-libre.pdf?1702800377=\u0026response-content-disposition=attachment%3B+filename%3DPedestrian_Using_Catadioptric_Sensor_12.pdf\u0026Expires=1733201480\u0026Signature=W5txYwR5ZbQtC1qDbXWQIV6Ib6eEONivSh6oBhsCAAXRopWCVdFiCv42~EZya3xYxAJEa3vgpwDwYBUCE-n22p0gWZybuBW7VkismYfk3Z3M5r3FcUYLxC441MtTsbxynDuvlw1CJPnxOSTL-rz0XKgpPkXPEUUvps3KI44DXd9h3nJi4DNT86TyjIz7WUAjTP~uwqIvIZKPEl5U4yWfMmjhPsnHdRORtbPNFp8FIINVUcqzzwFmYPuRNYZByLr7WqE5Z1yIFxCKTYhEp8hD-Hj9WGv0xuCA3r7vtCM23vBmlYmN6J~nEnP~HKDOBXNg9FSWJB0rwlZAH6vsBEadEQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098701,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098701/thumbnails/1.jpg","file_name":"2Vol96No8.pdf","download_url":"https://www.academia.edu/attachments/109098701/download_file","bulk_download_file_name":"Pedestrian_Using_Catadioptric_Sensor_12.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098701/2Vol96No8-libre.pdf?1702800378=\u0026response-content-disposition=attachment%3B+filename%3DPedestrian_Using_Catadioptric_Sensor_12.pdf\u0026Expires=1733201480\u0026Signature=af6tK71rJ7V6wvmWmL00ga5FRO3q9A2O3u3ANlQVpVEWdkXohls-~WTfWSwalleRg-ijIAI242KWWvbLv1XDjHCgyMJmXwexpBKA4pHlZ9otLgLROHzPOEytX2uRM6w9RzfirY1DVJ7bM~0E4KxbOz0H5ZQNH6gBkMcoxpuV-l4LOL9fmmyLpJPOx3t3clYvEMZvEiVwQVel0bDfdI4O8itHrJFMsd9CQCOxLaTXzz93NpFaX8Sb8LyTU39Y4imT8jT6zkpNyeldBoK22bYc0xkkceh1u5HxcCJnQwUkeG5l0UtWh-XYxHRDG3bVgXxp6NmzBU~qMX5zNLJkzcXHGg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[],"urls":[{"id":37229933,"url":"http://www.jatit.org/volumes/Vol96No8/2Vol96No8.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609814"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609814/Augmented_Reality_assistance_for_R_and_D_assembly_in_Aeronautics"><img alt="Research paper thumbnail of Augmented Reality assistance for R&amp;D assembly in Aeronautics" class="work-thumbnail" src="https://attachments.academia-assets.com/109098692/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609814/Augmented_Reality_assistance_for_R_and_D_assembly_in_Aeronautics">Augmented Reality assistance for R&amp;D assembly in Aeronautics</a></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="b8321624b867f8c22fc9e24801590d6f" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" 
data-download="{&quot;attachment_id&quot;:109098692,&quot;asset_id&quot;:111609814,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098692/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609814"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609814"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609814; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609814]").text(description); $(".js-view-count[data-work-id=111609814]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609814; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609814']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609814, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "b8321624b867f8c22fc9e24801590d6f" } } $('.js-work-strip[data-work-id=111609814]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609814,"title":"Augmented Reality assistance for R\u0026D assembly in 
Aeronautics","translated_title":"","metadata":{"publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609814/Augmented_Reality_assistance_for_R_and_D_assembly_in_Aeronautics","translated_internal_url":"","created_at":"2023-12-16T23:55:58.744-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098692,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098692/thumbnails/1.jpg","file_name":"LISPEN_EUROVR82018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098692/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Augmented_Reality_assistance_for_R_and_D.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098692/LISPEN_EUROVR82018_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAugmented_Reality_assistance_for_R_and_D.pdf\u0026Expires=1733201480\u0026Signature=Uzyc1RG4turQjc88aKxgcY2BTOmhLV-zEh5msuyu64-mFHZ~LPeiVs06xZxSWrOudwOJM6AvxG3hKMfH2TS0Uqwig~Lj981NGv5Cde5vPpue3QtLFSol0GlFPtzbWf62UZrwuoKvTkKx~cCec3ORh9H9KcAJ5CFFf7SdmTGGN7e2b2543e9nf-mW6MobJAu20JgiD3QcdLwyoOuG3S3m8M6HOvmSeqDL6CQI5zmjLXil2qmMk-dAFlKfO~7~P988dBsEO2VNtHUni6Eh9Ot74hUR-0RDxjRK2CXv2BGL3jNtJCG67FSApaam06nM~JLewBgMiECSXmtDP99FWWmhXA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Augmented_Reality_assistance_for_R_and_D_assembly_in_Aeronautics","translated_slug":"","page_count":3,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098692,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098692/thumbnails/1.jpg","file_name":"LISPEN_EUROVR82018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098692/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Augmented_Reality_assistance_for_R_and_D.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098692/LISPEN_EUROVR82018_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAugmented_Reality_assistance_for_R_and_D.pdf\u0026Expires=1733201480\u0026Signature=Uzyc1RG4turQjc88aKxgcY2BTOmhLV-zEh5msuyu64-mFHZ~LPeiVs06xZxSWrOudwOJM6AvxG3hKMfH2TS0Uqwig~Lj981NGv5Cde5vPpue3QtLFSol0GlFPtzbWf62UZrwuoKvTkKx~cCec3ORh9H9KcAJ5CFFf7SdmTGGN7e2b2543e9nf-mW6MobJAu20JgiD3QcdLwyoOuG3S3m8M6HOvmSeqDL6CQI5zmjLXil2qmMk-dAFlKfO~7~P988dBsEO2VNtHUni6Eh9Ot74hUR-0RDxjRK2CXv2BGL3jNtJCG67FSApaam06nM~JLewBgMiECSXmtDP99FWWmhXA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098699,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098699/thumbnails/1.jpg","file_name":"LISPEN_EUROVR82018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098699/download_file","bulk_download_file_name":"Augmented_Reality_assistance_for_R_and_D.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098699/LISPEN_EUROVR82018_ABABSA-libre.pdf?1702800347=\u0026response-content-disposition=attachment%3B+filename%3DAugmented_Reality_assistance_for_R_and_D.pdf\u0026Expires=1733201480\u0026Signature=OOvsRcIkVPclLA3DALDb3uzrIVxEyUHgVeznXfk75DzLc-466VvwrZoEf7Om0hiIIc~2Fj6HGzI5TnnF5nz3~FOvdAD7VvNvCLPgp-WkiCKZSJ7gubIbv5HMrAq~k1m23faVauyNHEoldXFXv6TPSQn3yiNIIIl4xmtpqwSvAayOJAJAHV6aVBAgvSO7BsptSLb1l-SpGrUif~jnKWi91iHZ-~dO0I5OoLFNGGzuLpbHQ~045VNpXHcpo7kCoW-fGg8hAx4aoG3H3NphfuBDRLcwTE8gzTaYpZxM4eqQxWcAfdnDFeIEU2DovZZYoXHvPR0r7yTyl8WZrmcK5JC94g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering"},{"id":88,"name":"Aerospace Engineering","url":"https://www.academia.edu/Documents/in/Aerospace_Engineering"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":89988,"name":"Assembly","url":"https://www.academia.edu/Documents/in/Assembly"},{"id":181995,"name":"Aeronautics","url":"https://www.academia.edu/Documents/in/Aeronautics"},{"id":1133084,"name":"Factory of the Future","url":"https://www.academia.edu/Documents/in/Factory_of_the_Future"}],"urls":[{"id":37229932,"url":"https://sam.ensam.eu/bitstream/handle/10985/14160/LISPEN_EUROVR82018_ABABSA.pdf?isAllowed=y\u0026sequence=1"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609812"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609812/Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul"><img alt="Research paper thumbnail of Usability of Augmented Reality in Aeronautic Maintenance, Repair 
and Overhaul" class="work-thumbnail" src="https://attachments.academia-assets.com/109098694/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609812/Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul">Usability of Augmented Reality in Aeronautic Maintenance, Repair and Overhaul</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Augmented Reality (AR) is a strong growing research topic in several areas including industry, tr...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Augmented Reality (AR) is a strong growing research topic in several areas including industry, training, art and entertainment. AR can help users to achieve very complex tasks by enhancing their vision with useful and well-adapted information. This paper deals with evaluating the usability of AR in aeronautic maintenance training tasks. A case study in the on-site maintenance department was conducted using an augmented reality application, involving operators at several levels of expertise. Obtained results highlighted the full efficacy of AR in the field of aeronautic maintenance. CCS Concepts • Human-centered computing → Mixed / augmented reality;</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="8347157cb8c47b51c9486658ecb28495" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098694,&quot;asset_id&quot;:111609812,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098694/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609812"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609812"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609812; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609812]").text(description); $(".js-view-count[data-work-id=111609812]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609812; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609812']"); 
container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609812, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "8347157cb8c47b51c9486658ecb28495" } } $('.js-work-strip[data-work-id=111609812]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609812,"title":"Usability of Augmented Reality in Aeronautic Maintenance, Repair and Overhaul","translated_title":"","metadata":{"abstract":"Augmented Reality (AR) is a strong growing research topic in several areas including industry, training, art and entertainment. AR can help users to achieve very complex tasks by enhancing their vision with useful and well-adapted information. This paper deals with evaluating the usability of AR in aeronautic maintenance training tasks. A case study in the on-site maintenance department was conducted using an augmented reality application, involving operators at several levels of expertise. Obtained results highlighted the full efficacy of AR in the field of aeronautic maintenance. CCS Concepts • Human-centered computing → Mixed / augmented reality;","publisher":"ICAT-EGVE","publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":"Augmented Reality (AR) is a strong growing research topic in several areas including industry, training, art and entertainment. AR can help users to achieve very complex tasks by enhancing their vision with useful and well-adapted information. This paper deals with evaluating the usability of AR in aeronautic maintenance training tasks. A case study in the on-site maintenance department was conducted using an augmented reality application, involving operators at several levels of expertise. Obtained results highlighted the full efficacy of AR in the field of aeronautic maintenance. 
CCS Concepts • Human-centered computing → Mixed / augmented reality;","internal_url":"https://www.academia.edu/111609812/Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul","translated_internal_url":"","created_at":"2023-12-16T23:55:58.522-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098694,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098694/thumbnails/1.jpg","file_name":"LISPEN_ICAT-EGVE_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098694/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Usability_of_Augmented_Reality_in_Aerona.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098694/LISPEN_ICAT-EGVE_2018_ABABSA-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DUsability_of_Augmented_Reality_in_Aerona.pdf\u0026Expires=1733201480\u0026Signature=Su7b4wWCWdt60tgbyqfw716wNX7AVPNmw5WQT~GgheBN4J0YITCvGGFENHAyjNqMIOPyAfeiwuTmMSb~ViVUiWTxBWyQ6Hj10GXG1bVVmmoW2IqDBhmUGLZMmcIW51by2NEEN~3Pjjj4Ic4Qu~lPSN3k~Kd2h2aVsqSoYPP~vN2mYfCVnldZOBqODsBReT1jB5d7kYhcflm~I4-cDMkwg6ol52i9qZhTpaalBQzAafWp-nUBe2H8Z6yZXJZfu66nV2UIsDSJGIYAX0Em76-Oa46SYjX-rzP9Hri1Rq0Uh4NVLJC4QlbPnqiNAeNcLJCEX5QQWyuMUiC3XUXgIpf4Zw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Usability_of_Augmented_Reality_in_Aeronautic_Maintenance_Repair_and_Overhaul","translated_slug":"","page_count":5,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098694,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098694/thumbnails/1.jpg","file_name":"LISPEN_ICAT-EGVE_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098694/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Usability_of_Augmented_Reality_in_Aerona.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098694/LISPEN_ICAT-EGVE_2018_ABABSA-libre.pdf?1702800350=\u0026response-content-disposition=attachment%3B+filename%3DUsability_of_Augmented_Reality_in_Aerona.pdf\u0026Expires=1733201480\u0026Signature=Su7b4wWCWdt60tgbyqfw716wNX7AVPNmw5WQT~GgheBN4J0YITCvGGFENHAyjNqMIOPyAfeiwuTmMSb~ViVUiWTxBWyQ6Hj10GXG1bVVmmoW2IqDBhmUGLZMmcIW51by2NEEN~3Pjjj4Ic4Qu~lPSN3k~Kd2h2aVsqSoYPP~vN2mYfCVnldZOBqODsBReT1jB5d7kYhcflm~I4-cDMkwg6ol52i9qZhTpaalBQzAafWp-nUBe2H8Z6yZXJZfu66nV2UIsDSJGIYAX0Em76-Oa46SYjX-rzP9Hri1Rq0Uh4NVLJC4QlbPnqiNAeNcLJCEX5QQWyuMUiC3XUXgIpf4Zw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098695,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098695/thumbnails/1.jpg","file_name":"LISPEN_ICAT-EGVE_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098695/download_file","bulk_download_file_name":"Usability_of_Augmented_Reality_in_Aerona.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098695/LISPEN_ICAT-EGVE_2018_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DUsability_of_Augmented_Reality_in_Aerona.pdf\u0026Expires=1733201480\u0026Signature=U4oyTgGiTPxseTeWQNUkuAYCDQc4RNPEAHdHGVsSu94fDoE1fb68-C6YzdkKLTd3SJNt48WiPZAE1NWcxzsnwF6V4FZh3vjroBgSPkP1kXCF6gI2Io4ZARiyLYyufbmfBxc9e0xOrn0wRjE2OEzIZLWMloF7RbfwaImaMRo2r3hE9bpckJXDHeyzXxljR8faM8faQkoIZr2zfddvEqkyND6uXQMOcTOVqrBj3i8xQJzay-66hwl54NsaD-FltxrJ1GCIai-AbWirj-2XlZWK82CrwkzLDHAsfewMVIZC0GU4ppDZWnVdpt914jYtB~Za56T1GhEFtYpJLDHuI5Cdsg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":472,"name":"Human Computer Interaction","url":"https://www.academia.edu/Documents/in/Human_Computer_Interaction"},{"id":1615,"name":"Usability","url":"https://www.academia.edu/Documents/in/Usability"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":87372,"name":"Maintenance","url":"https://www.academia.edu/Documents/in/Maintenance"}],"urls":[{"id":37229931,"url":"https://sam.ensam.eu/bitstream/handle/10985/14360/LISPEN_ICAT-EGVE_2018_ABABSA.pdf?isAllowed=y\u0026sequence=4"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609811"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609811/Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion"><img alt="Research paper thumbnail of Free Hand-Based 3D Interaction in Optical See-Through Augmented Reality Using Leap Motion" class="work-thumbnail" 
src="https://attachments.academia-assets.com/109098696/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609811/Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion">Free Hand-Based 3D Interaction in Optical See-Through Augmented Reality Using Leap Motion</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In augmented reality environments, the natural hand interaction between a virtual object and the ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In augmented reality environments, the natural hand interaction between a virtual object and the user is a major issue to manipulate a rendered object in a convenient way. Microsoft’s HoloLens (Microsoft 2018) is an innovative augmented reality (AR) device that has provided an impressive experience for the user. However, the gesture interactions offered to the user are very limited. HoloLens currently recognizes two core component gestures: Air tap and Bloom. To solve this issue, we propose to integrate a Leap Motion Controller (LMC) within the HoloLens device (Figure 1). We thus used 3D hand and finger tracking provided by the LMC (Lu &amp; al., 2016) to propose new free hand-based interaction more natural and intuitive. We implemented three fully 3D techniques for selection, translation and rotation manipulation. 
In this work, we first investigated how to combine the two devices to get them working together in real time, and then we evaluated the proposed 3D hand interactions.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="de7247812016efae9b19291984428968" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098696,&quot;asset_id&quot;:111609811,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098696/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609811"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609811"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609811; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609811]").text(description); $(".js-view-count[data-work-id=111609811]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609811; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609811']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609811, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "de7247812016efae9b19291984428968" } } $('.js-work-strip[data-work-id=111609811]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609811,"title":"Free Hand-Based 3D Interaction in Optical See-Through Augmented Reality Using Leap Motion","translated_title":"","metadata":{"abstract":"In augmented reality environments, the natural hand interaction between a virtual object and the user is a major issue to manipulate a rendered object in a convenient way. 
Microsoft’s HoloLens (Microsoft 2018) is an innovative augmented reality (AR) device that has provided an impressive experience for the user. However, the gesture interactions offered to the user are very limited. HoloLens currently recognizes two core component gestures: Air tap and Bloom. To solve this issue, we propose to integrate a Leap Motion Controller (LMC) within the HoloLens device (Figure 1). We thus used 3D hand and finger tracking provided by the LMC (Lu \u0026 al., 2016) to propose new free hand-based interaction more natural and intuitive. We implemented three fully 3D techniques for selection, translation and rotation manipulation. In this work, we first investigated how to combine the two devices to get them working together in real time, and then we evaluated the proposed 3D hand interactions.","publication_date":{"day":null,"month":null,"year":2018,"errors":{}}},"translated_abstract":"In augmented reality environments, the natural hand interaction between a virtual object and the user is a major issue to manipulate a rendered object in a convenient way. Microsoft’s HoloLens (Microsoft 2018) is an innovative augmented reality (AR) device that has provided an impressive experience for the user. However, the gesture interactions offered to the user are very limited. HoloLens currently recognizes two core component gestures: Air tap and Bloom. To solve this issue, we propose to integrate a Leap Motion Controller (LMC) within the HoloLens device (Figure 1). We thus used 3D hand and finger tracking provided by the LMC (Lu \u0026 al., 2016) to propose new free hand-based interaction more natural and intuitive. We implemented three fully 3D techniques for selection, translation and rotation manipulation. In this work, we first investigated how to combine the two devices to get them working together in real time, and then we evaluated the proposed 3D hand 
interactions.","internal_url":"https://www.academia.edu/111609811/Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion","translated_internal_url":"","created_at":"2023-12-16T23:55:57.410-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098696,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098696/thumbnails/1.jpg","file_name":"LISPEN_EuroVR_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098696/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Free_Hand_Based_3D_Interaction_in_Optica.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098696/LISPEN_EuroVR_2018_ABABSA-libre.pdf?1702800349=\u0026response-content-disposition=attachment%3B+filename%3DFree_Hand_Based_3D_Interaction_in_Optica.pdf\u0026Expires=1733201480\u0026Signature=LPrk--QNFFIaDXJ~evWsFlGW94mvXT51WDmaQCgPD49ovrEMX3hSxyww6eLVzAYcpdJFunBJnlNM~Fxo9oGZwielyjRrt2vg6Eze2M2hAVQzWSNu15aIV3Vo3H5E0~5YjrNI4jit4~haBQ~xcrKuwezVRmahWpWIZF4If4GlFCHAwWBaZLqoOPGrFEUDT9-9U07t-ysC-biT5t~Hw--RzGnRHUaxD3qoO6f-R8QcW9wfmSOQAFCJFwz0KqmtwhAcY2k82TozGFv1yuJH4xDV2JtCbcdq3-~z6kyaggda60BcsAYyGZP1MyubSmNuZLnsoz1GE7dpSW4Pt24qOvYmZg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Free_Hand_Based_3D_Interaction_in_Optical_See_Through_Augmented_Reality_Using_Leap_Motion","translated_slug":"","page_count":3,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine 
Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098696,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098696/thumbnails/1.jpg","file_name":"LISPEN_EuroVR_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098696/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Free_Hand_Based_3D_Interaction_in_Optica.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098696/LISPEN_EuroVR_2018_ABABSA-libre.pdf?1702800349=\u0026response-content-disposition=attachment%3B+filename%3DFree_Hand_Based_3D_Interaction_in_Optica.pdf\u0026Expires=1733201480\u0026Signature=LPrk--QNFFIaDXJ~evWsFlGW94mvXT51WDmaQCgPD49ovrEMX3hSxyww6eLVzAYcpdJFunBJnlNM~Fxo9oGZwielyjRrt2vg6Eze2M2hAVQzWSNu15aIV3Vo3H5E0~5YjrNI4jit4~haBQ~xcrKuwezVRmahWpWIZF4If4GlFCHAwWBaZLqoOPGrFEUDT9-9U07t-ysC-biT5t~Hw--RzGnRHUaxD3qoO6f-R8QcW9wfmSOQAFCJFwz0KqmtwhAcY2k82TozGFv1yuJH4xDV2JtCbcdq3-~z6kyaggda60BcsAYyGZP1MyubSmNuZLnsoz1GE7dpSW4Pt24qOvYmZg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":109098700,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098700/thumbnails/1.jpg","file_name":"LISPEN_EuroVR_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098700/download_file","bulk_download_file_name":"Free_Hand_Based_3D_Interaction_in_Optica.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098700/LISPEN_EuroVR_2018_ABABSA-libre.pdf?1702800347=\u0026response-content-disposition=attachment%3B+filename%3DFree_Hand_Based_3D_Interaction_in_Optica.pdf\u0026Expires=1733201480\u0026Signature=UmV~UwXUNN3tKsXgLzAN3haUO0DJ3LBWW7nsk6uuD-reFaZBByxdI7dzkejahlZxGvfd6N-qdzRVmIb1OE-TVtpbBfglaz9r~I5XzHbztJ8Ki3T6HDxN61rraFSVSzKj~k6Xp4v2WT~krCzRtWs60W10vqQ3j~ZJB6TjbA-wxrdB0753WHj-4tk12zZ~kUGcKPrn1Z4aF8K4XYgc-9C8fbzf3EBiATSd7g2vJOdJ7u0YOpwpeu2~pwzcTQvFiY0djejRIxHXyYwZjfKHYxz7fJh-HVRDudV2qRQSDOYRabwgIuzGTehx0pggcF~h678jmmoVME8jBTNTFFBeXpMC4w__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":5673,"name":"Augmented Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":96156,"name":"Natural interaction","url":"https://www.academia.edu/Documents/in/Natural_interaction"},{"id":96893,"name":"Calibration","url":"https://www.academia.edu/Documents/in/Calibration"},{"id":1167359,"name":"Leap Motion","url":"https://www.academia.edu/Documents/in/Leap_Motion"},{"id":2668784,"name":"HoloLens","url":"https://www.academia.edu/Documents/in/HoloLens-1"}],"urls":[{"id":37229930,"url":"https://sam.ensam.eu/bitstream/handle/10985/14475/LISPEN_EuroVR_2018_ABABSA.pdf?isAllowed=y\u0026sequence=1"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609810"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609810/Reconnaissance_3D_des_Gestes_pour_l_Interaction_Naturelle_Homme_Robot"><img alt="Research paper thumbnail of Reconnaissance 3D des Gestes pour l’Interaction Naturelle Homme Robot" class="work-thumbnail" src="https://attachments.academia-assets.com/109098684/thumbnails/1.jpg" 
Reconnaissance 3D des Gestes pour l'Interaction Naturelle Homme Robot (3D Gesture Recognition for Natural Human-Robot Interaction) (2015)
Abstract (translated from French): In this paper, we propose a 3D gesture recognition method for human-robot interaction (HRI) based on the depth information provided by the Kinect. Body tracking is performed with the Skeleton algorithm provided by the Kinect SDK. The idea of this work is to compute the joint angles of the upper body while the gesture is being performed; the variations of these angles are then fed to Hidden Markov Models in order to recognize dynamic gestures. The results show that our method is very robust: it requires little preprocessing and is not affected by environmental conditions such as lighting changes and scene complexity.
Research interests: Humanities, Art, Hidden Markov Models, gesture tracking.
Academia.edu: https://www.academia.edu/111609810/Reconnaissance_3D_des_Gestes_pour_l_Interaction_Naturelle_Homme_Robot
Full text: https://hal.archives-ouvertes.fr/hal-01177441/document
Dynamic Gesture Recognition for Natural Human System Interaction (2016)
Abstract: This paper addresses two problems: 3D dynamic gesture recognition and gesture misallocation. To solve them, we propose a new approach that combines Hidden Markov Models (HMM) and Dynamic Time Warping (DTW). The approach has two main phases: first, gestures are recognized using a hidden Markov model; second, misallocation is avoided by rejecting gestures based on a threshold computed using DTW. Our database includes many samples of five gestures captured with a Kinect and described by depth information only. The results show that our approach yields good gesture classification without any misallocation and is robust against environmental constraints.
Research interests: Computer Science, Artificial Intelligence, Human Computer Interaction, Gesture, Gesture Recognition, Kinect, hidden Markov model, Dynamic Time Warping, Archaeology of Natural Places.
Academia.edu: https://www.academia.edu/111609808/Dynamic_Gesture_Recognition_for_Natural_Human_System_Interaction
Full text: http://www.jatit.org/volumes/Vol91No2/17Vol91No2.pdf
work-percentile"></span></span><script>$(function () { var workId = 111609807; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609807']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609807, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "3a00f5ff7e83c04d6462451f6e8b2d85" } } $('.js-work-strip[data-work-id=111609807]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609807,"title":"Combining HoloLens and Leap-Motion for Free Hand-Based 3D Interaction in MR Environments","translated_title":"","metadata":{"publisher":"Springer International Publishing","grobid_abstract":"The ability to interact with virtual objects using gestures would allow users to improve their experience in Mixed Reality (MR) environments, especially when they use AR headsets. Today, MR head-mounted displays like the HoloLens integrate hand gesture based interaction allowing users to take actions in MR environments. However, the proposed interactions remain limited. In this paper, we propose to combine a Leap Motion Controller (LMC) with a HoloLens in order to improve gesture interaction with virtual objects. Two main issues are presented: an interactive calibration procedure for the coupled HoloLens-LMC device and an intuitive hand-based interaction approach using LMC data in the HoloLens environment. 
A set of first experiments was carried out to evaluate the accuracy and the usability of the proposed approach.","publication_date":{"day":null,"month":null,"year":2020,"errors":{}},"publication_name":"Lecture Notes in Computer Science","grobid_abstract_attachment_id":109098737},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609807/Combining_HoloLens_and_Leap_Motion_for_Free_Hand_Based_3D_Interaction_in_MR_Environments","translated_internal_url":"","created_at":"2023-12-16T23:55:56.662-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098737,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098737/thumbnails/1.jpg","file_name":"LISPEN_2020_AVR_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098737/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Combining_HoloLens_and_Leap_Motion_for_F.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098737/LISPEN_2020_AVR_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DCombining_HoloLens_and_Leap_Motion_for_F.pdf\u0026Expires=1733201480\u0026Signature=AEpeTFzEtmMcvnr11H1sLg7lDgTgmDTsB-yYTJnxQP~73pjvZNxY4oIDVeIt~u~mI91KO4E3XExsISWDyCLy1q9qpmip9YQ4RBxXCwU0MaAQnvepbxaAzPR2LIyedmFxnLffzQzJriUTERNOeZFb8njaDaFtQ1WjRdDdNV2SveLY45xVzKPlDh45Kmy55sLFrAAUeTJnCYeIB5s9SgkmKV1NPOq2rKbDt~Y26PIk3-UVQkFXzXYy0ORZxQ5isKHgcFnRXZZPQ703end0~q3pOhLlArnKe6097pldvBVWMCWN4T8AyTajMFWE2LuDfLxvuf~DjZwZHJBITa3WSrmpLQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Combining_HoloLens_and_Leap_Motion_for_Free_Hand_Based_3D_Interaction_in_MR_Environments","translated_slug":"","page_count":14,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098737,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098737/thumbnails/1.jpg","file_name":"LISPEN_2020_AVR_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098737/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MCw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Combining_HoloLens_and_Leap_Motion_for_F.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098737/LISPEN_2020_AVR_ABABSA-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DCombining_HoloLens_and_Leap_Motion_for_F.pdf\u0026Expires=1733201480\u0026Signature=AEpeTFzEtmMcvnr11H1sLg7lDgTgmDTsB-yYTJnxQP~73pjvZNxY4oIDVeIt~u~mI91KO4E3XExsISWDyCLy1q9qpmip9YQ4RBxXCwU0MaAQnvepbxaAzPR2LIyedmFxnLffzQzJriUTERNOeZFb8njaDaFtQ1WjRdDdNV2SveLY45xVzKPlDh45Kmy55sLFrAAUeTJnCYeIB5s9SgkmKV1NPOq2rKbDt~Y26PIk3-UVQkFXzXYy0ORZxQ5isKHgcFnRXZZPQ703end0~q3pOhLlArnKe6097pldvBVWMCWN4T8AyTajMFWE2LuDfLxvuf~DjZwZHJBITa3WSrmpLQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":472,"name":"Human Computer Interaction","url":"https://www.academia.edu/Documents/in/Human_Computer_Interaction"},{"id":854,"name":"Computer 
Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision"},{"id":1615,"name":"Usability","url":"https://www.academia.edu/Documents/in/Usability"},{"id":3147,"name":"Gesture","url":"https://www.academia.edu/Documents/in/Gesture"},{"id":39370,"name":"Mixed Reality","url":"https://www.academia.edu/Documents/in/Mixed_Reality"},{"id":96156,"name":"Natural interaction","url":"https://www.academia.edu/Documents/in/Natural_interaction"},{"id":96893,"name":"Calibration","url":"https://www.academia.edu/Documents/in/Calibration"}],"urls":[{"id":37229926,"url":"https://link.springer.com/content/pdf/10.1007/978-3-030-58465-8_24"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609806"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609806/3D_Human_Tracking_with_Catadioptric_Omnidirectional_Camera"><img alt="Research paper thumbnail of 3D Human Tracking with Catadioptric Omnidirectional Camera" class="work-thumbnail" src="https://attachments.academia-assets.com/109098770/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609806/3D_Human_Tracking_with_Catadioptric_Omnidirectional_Camera">3D Human Tracking with Catadioptric Omnidirectional Camera</a></div><div class="wp-workCard_item"><span>Proceedings of the 2019 on International Conference on Multimedia Retrieval</span><span>, 2019</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6f0271e0e5bf2ef2df5e822617413195" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098770,&quot;asset_id&quot;:111609806,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098770/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609806"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609806"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609806; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609806]").text(description); $(".js-view-count[data-work-id=111609806]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var 
Research paper: 3D Human Tracking with Catadioptric Omnidirectional Camera
Proceedings of the 2019 on International Conference on Multimedia Retrieval (ACM), 2019.
Abstract: This paper deals with the problem of 3D human tracking in catadioptric images using a particle-filtering framework. While traditional perspective images are well exploited, only a few methods have been developed in catadioptric vision for human detection or tracking. We propose to extend 3D pose estimation from perspective cameras to catadioptric sensors. In this paper, we develop original likelihood functions based, on the one hand, on the geodesic distance in the spherical space SO(3) and, on the other hand, on the mapping between the human silhouette in the images and the projected 3D model. These likelihood functions, combined with a particle filter whose propagation model is adapted to the spherical space, allow accurate 3D human tracking in omnidirectional images. Both visual and quantitative analysis of the experimental results demonstrate the effectiveness of our approach.
Keywords: Computer Science; Artificial Intelligence; Computer Vision; Human Tracking; Particle Filtering; Particle Filter; Egomotion; Silhouette; Omnidirectional Camera; Omnidirectional Antenna.
Link: https://dl.acm.org/doi/pdf/10.1145/3323873.3325027
Research paper: Augmented Reality Application in Manufacturing Industry: Maintenance and Non-destructive Testing (NDT) Use Cases
Lecture Notes in Computer Science (Springer International Publishing), 2020.
Abstract: In recent years, a structural transformation of the manufacturing industry has been occurring as a result of the digital revolution. Digital tools, in particular virtual and augmented reality, are now systematically used throughout the entire value chain, from design to production to marketing. The purpose of this paper is therefore to review, through concrete use cases, the progress of these novel technologies and their use in the manufacturing industry.
Keywords: Engineering; Computer Science; Manufacturing; Augmented Reality; Nondestructive Testing; Industry; Manufacturing Engineering; Human Machine Interaction; Digital Transformation.
Link: https://link.springer.com/content/pdf/10.1007/978-3-030-58468-9_24
Research paper: Methodology for the Field Evaluation of the Impact of Augmented Reality Tools for Maintenance Workers in the Aeronautic Industry
Frontiers in Virtual Reality (Frontiers Media SA), 2021.
Abstract: Augmented Reality (AR) enhances the comprehension of complex situations by making the handling of contextual information easier. Maintenance activities in aeronautics consist of complex tasks carried out on various high-technology products under severe constraints from the sector and work environment. AR tools appear to be a potential solution to improve interactions between workers and technical data to increase the productivity and the quality of aeronautical maintenance activities. However, assessments of the actual impact of AR on industrial processes are limited due to a lack of methods and tools to assist in the integration and evaluation of AR tools in the field. This paper presents a method for deploying AR tools adapted to maintenance workers and for selecting relevant evaluation criteria of the impact in an industrial context. This method is applied to design an AR tool for the maintenance workshop, to experiment on real use cases, and to observe the impact of AR on produc...
Keywords: Computer Science; Augmented Reality; Productivity; Aeronautic; Industry; Maintenance; Criteria; Deploy.
Link: https://www.frontiersin.org/articles/10.3389/frvir.2020.603189/full
Research paper: 3D Human Pose Estimation with a Catadioptric Sensor in Unconstrained Environments Using an Annealed Particle Filter
Sensors (MDPI), 2020.
Abstract: The purpose of this paper is to investigate the problem of 3D human tracking in complex environments using a particle filter with images captured by a catadioptric vision system. This issue has been widely studied in the literature on RGB images acquired from conventional perspective cameras, while omnidirectional images have seldom been used and published research in this field remains limited. In this study, Riemannian manifolds were considered in order to compute the gradient on spherical images and generate a robust descriptor used along with an SVM classifier for human detection. Original likelihood functions associated with the particle filter are proposed, using both geodesic distances and overlapping regions between the silhouette detected in the images and the projected 3D human model. Our approach was experimentally evaluated on real data and showed favorable results compared to machine-learning-based techniques in terms of 3D pose accuracy. Thus, the Root Mean Squa...
Keywords: Computer Science; Artificial Intelligence; Analytical Chemistry; Computer Vision; Medicine; Sensors; Human Tracking; Particle Filter; Electrical and Electronic Engineering; Silhouette; Omnidirectional Camera; Mean Squared Error.
Link: https://www.mdpi.com/1424-8220/20/23/6985/pdf
class="js-work-strip profile--work_container" data-work-id="111609802"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609802/An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning"><img alt="Research paper thumbnail of An Efficient Human Activity Recognition Technique Based on Deep Learning" class="work-thumbnail" src="https://attachments.academia-assets.com/109098776/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609802/An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning">An Efficient Human Activity Recognition Technique Based on Deep Learning</a></div><div class="wp-workCard_item"><span>Pattern Recognition and Image Analysis</span><span>, 2019</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="cb81a65636e34f8edb82a26e73ea1a41" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098776,&quot;asset_id&quot;:111609802,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098776/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609802"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609802"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609802; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609802]").text(description); $(".js-view-count[data-work-id=111609802]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609802; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609802']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609802, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 
})(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "cb81a65636e34f8edb82a26e73ea1a41" } } $('.js-work-strip[data-work-id=111609802]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609802,"title":"An Efficient Human Activity Recognition Technique Based on Deep Learning","translated_title":"","metadata":{"publisher":"Pleiades Publishing Ltd","ai_title_tag":"Efficient Deep Learning for Human Activity Recognition","grobid_abstract":"In this paper, we present a new deep learning-based human activity recognition technique. First, we track and extract human body from each frame of the video stream. Next, we abstract human silhouettes and use them to create binary space-time maps (BSTMs) which summarize human activity within a defined time interval. Finally, we use convolutional neural network (CNN) to extract features from BSTMs and classify the activities. To evaluate our approach, we carried out several tests using three public datasets: Weizmann, Keck Gesture and KTH Database. Experimental results show that our technique outperforms conventional state-of-the-art methods in term of recognition accuracy and provides comparable performance against recent deep learning techniques. It's simple to implement, requires less computing power, and can be used for multi-subject activity recognition.","publication_date":{"day":null,"month":null,"year":2019,"errors":{}},"publication_name":"Pattern Recognition and Image 
Analysis","grobid_abstract_attachment_id":109098776},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609802/An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning","translated_internal_url":"","created_at":"2023-12-16T23:55:55.558-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098776,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098776/thumbnails/1.jpg","file_name":"LISPEN_PRIA_ABABSA_2019.pdf","download_url":"https://www.academia.edu/attachments/109098776/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"An_Efficient_Human_Activity_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098776/LISPEN_PRIA_ABABSA_2019-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAn_Efficient_Human_Activity_Recognition.pdf\u0026Expires=1733201481\u0026Signature=fWxzzyFgkmNzbaTMZa0WG4HPAANcrinG21zekO~o5MDR45JTASjkxPrabp1~IsjnLl5n8ML0dagyKUCjSgAulMGTxPRu2WcKi7bW3DmH-~RmVYU8LVfSs-q6B2QgUGVBgtM9UHFEDBNekwnhAx0mQLrmDSCyPWGBQXi2TvY8CkTckq6txFbix5kbkunUu3o-kFl95i2yPFmcSgQ1KGVu405M4aAYoJx3MLBc8R2D274f0NlJpTnGNVb-qO6685OV6vaae~chDIUI6BAFAfPUJ3iRxXdu~CWcQzDB2hICo1uQRVTFPXSk3uXYfOePJN-lxJ7A4ysboswO9oTmzFq5pw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"An_Efficient_Human_Activity_Recognition_Technique_Based_on_Deep_Learning","translated_slug":"","page_count":15,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098776,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098776/thumbnails/1.jpg","file_name":"LISPEN_PRIA_ABABSA_2019.pdf","download_url":"https://www.academia.edu/attachments/109098776/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"An_Efficient_Human_Activity_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098776/LISPEN_PRIA_ABABSA_2019-libre.pdf?1702800348=\u0026response-content-disposition=attachment%3B+filename%3DAn_Efficient_Human_Activity_Recognition.pdf\u0026Expires=1733201481\u0026Signature=fWxzzyFgkmNzbaTMZa0WG4HPAANcrinG21zekO~o5MDR45JTASjkxPrabp1~IsjnLl5n8ML0dagyKUCjSgAulMGTxPRu2WcKi7bW3DmH-~RmVYU8LVfSs-q6B2QgUGVBgtM9UHFEDBNekwnhAx0mQLrmDSCyPWGBQXi2TvY8CkTckq6txFbix5kbkunUu3o-kFl95i2yPFmcSgQ1KGVu405M4aAYoJx3MLBc8R2D274f0NlJpTnGNVb-qO6685OV6vaae~chDIUI6BAFAfPUJ3iRxXdu~CWcQzDB2hICo1uQRVTFPXSk3uXYfOePJN-lxJ7A4ysboswO9oTmzFq5pw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":48,"name":"Engineering","url":"https://www.academia.edu/Documents/in/Engineering"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":17701,"name":"Gesture Recognition","url":"https://www.academia.edu/Documents/in/Gesture_Recognition"},{"id":36449,"name":"Activity 
Recognition","url":"https://www.academia.edu/Documents/in/Activity_Recognition"},{"id":57238,"name":"Human Activity Recognition","url":"https://www.academia.edu/Documents/in/Human_Activity_Recognition"},{"id":61145,"name":"Medical Image Analysis and Pattern Recognition","url":"https://www.academia.edu/Documents/in/Medical_Image_Analysis_and_Pattern_Recognition"},{"id":80414,"name":"Mathematical Sciences","url":"https://www.academia.edu/Documents/in/Mathematical_Sciences"},{"id":81182,"name":"Deep Learning","url":"https://www.academia.edu/Documents/in/Deep_Learning"},{"id":106145,"name":"Classification","url":"https://www.academia.edu/Documents/in/Classification"},{"id":1568111,"name":"Convolutional Neural Network","url":"https://www.academia.edu/Documents/in/Convolutional_Neural_Network"},{"id":1597410,"name":"Features Extraction","url":"https://www.academia.edu/Documents/in/Features_Extraction"},{"id":2058532,"name":"Convolutional Neural Network [CNN]","url":"https://www.academia.edu/Documents/in/Convolutional_Neural_Network_CNN_"}],"urls":[{"id":37229921,"url":"http://link.springer.com/content/pdf/10.1134/S1054661819040084.pdf"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609800"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609800/Fusion_of_structural_and_textural_features_for_melanoma_recognition"><img alt="Research paper thumbnail of Fusion of structural and textural features for melanoma recognition" class="work-thumbnail" src="https://attachments.academia-assets.com/109098762/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609800/Fusion_of_structural_and_textural_features_for_melanoma_recognition">Fusion of structural and textural features for melanoma recognition</a></div><div class="wp-workCard_item"><span>IET Computer Vision</span><span>, 2018</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Melanoma is one the most increasing cancers since past decades. For accurate detection and classi...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Melanoma is one the most increasing cancers since past decades. For accurate detection and classification, discriminative features are required to distinguish between benign and malignant cases. In this study, the authors introduce a fusion of structural and textural features from two descriptors. The structural features are extracted from wavelet and curvelet transforms, whereas the textural features are extracted from different variants of local binary pattern operator. The proposed method is implemented on 200 images from dermoscopy database including 160 non‐melanoma and 40 melanoma images, where a rigorous statistical analysis for the database is performed. 
Research paper: Fusion of structural and textural features for melanoma recognition
IET Computer Vision (Institution of Engineering and Technology), 2018.
Abstract: Melanoma is one of the most rapidly increasing cancers of the past decades. For accurate detection and classification, discriminative features are required to distinguish between benign and malignant cases. In this study, the authors introduce a fusion of structural and textural features from two descriptors. The structural features are extracted from wavelet and curvelet transforms, whereas the textural features are extracted from different variants of the local binary pattern operator. The proposed method is applied to 200 images from a dermoscopy database, including 160 non-melanoma and 40 melanoma images, for which a rigorous statistical analysis is performed. Using a support vector machine (SVM) classifier with random-sampling cross-validation between the three cases of skin lesions given in the database, the validated results showed a very encouraging performance with a sensitivity of 78.93%, a specificity of 93.25% and an accuracy of 86.07%. The proposed approach outperfor...
The proposed approach outperfor...","internal_url":"https://www.academia.edu/111609800/Fusion_of_structural_and_textural_features_for_melanoma_recognition","translated_internal_url":"","created_at":"2023-12-16T23:55:55.307-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098762,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098762/thumbnails/1.jpg","file_name":"paper3.pdf","download_url":"https://www.academia.edu/attachments/109098762/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fusion_of_structural_and_textural_featur.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098762/paper3-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DFusion_of_structural_and_textural_featur.pdf\u0026Expires=1733201481\u0026Signature=aqJHVsDbDdZfYXO4UYt7HgO1fWLWGM60OdKADjUzxARh5lVIFFfFh2tQiUFXI2BPEdFVud2VifwaaUXFJuDHSC9SO3wFrYxYPCFSN6SReIVI6Q1E6eqNvU14cmr40wb3sxUeHmQgeZmDliR3Np3J-d6DIMY5hJ7VvUgi8VNCNqeSbutUWacHQUEfa7htAyMtMvRYNYnf05jCsH-maZTWYmOOfLEoCcS4pvYAYu~ANsrUx0mxKwMoaWsX9NDHsRje-NRUzgkUjjenMchF9oH7YE-kfCsUbdL5TmDFl8MFS4L1Gcm6YlD4X1vL8srQGW41eH0uogcpaCMq6XAohCZm7g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Fusion_of_structural_and_textural_features_for_melanoma_recognition","translated_slug":"","page_count":7,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098762,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098762/thumbnails/1.jpg","file_name":"paper3.pdf","download_url":"https://www.academia.edu/attachments/109098762/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Fusion_of_structural_and_textural_featur.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098762/paper3-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DFusion_of_structural_and_textural_featur.pdf\u0026Expires=1733201481\u0026Signature=aqJHVsDbDdZfYXO4UYt7HgO1fWLWGM60OdKADjUzxARh5lVIFFfFh2tQiUFXI2BPEdFVud2VifwaaUXFJuDHSC9SO3wFrYxYPCFSN6SReIVI6Q1E6eqNvU14cmr40wb3sxUeHmQgeZmDliR3Np3J-d6DIMY5hJ7VvUgi8VNCNqeSbutUWacHQUEfa7htAyMtMvRYNYnf05jCsH-maZTWYmOOfLEoCcS4pvYAYu~ANsrUx0mxKwMoaWsX9NDHsRje-NRUzgkUjjenMchF9oH7YE-kfCsUbdL5TmDFl8MFS4L1Gcm6YlD4X1vL8srQGW41eH0uogcpaCMq6XAohCZm7g__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science"},{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":4998,"name":"Medical Image Processing","url":"https://www.academia.edu/Documents/in/Medical_Image_Processing"},{"id":6021,"name":"Cancer","url":"https://www.academia.edu/Documents/in/Cancer"},{"id":10408,"name":"Support Vector 
Machines","url":"https://www.academia.edu/Documents/in/Support_Vector_Machines"},{"id":83038,"name":"Image fusion","url":"https://www.academia.edu/Documents/in/Image_fusion"},{"id":91365,"name":"Wavelet Transforms","url":"https://www.academia.edu/Documents/in/Wavelet_Transforms"},{"id":150094,"name":"Fusion","url":"https://www.academia.edu/Documents/in/Fusion"},{"id":160144,"name":"Feature Extraction","url":"https://www.academia.edu/Documents/in/Feature_Extraction"},{"id":167397,"name":"Image recognition","url":"https://www.academia.edu/Documents/in/Image_recognition"},{"id":191289,"name":"Support vector machine","url":"https://www.academia.edu/Documents/in/Support_vector_machine"},{"id":403692,"name":"Curvelet","url":"https://www.academia.edu/Documents/in/Curvelet"},{"id":457105,"name":"Dermoscopy","url":"https://www.academia.edu/Documents/in/Dermoscopy"},{"id":1010893,"name":"Cancers","url":"https://www.academia.edu/Documents/in/Cancers"},{"id":1237788,"name":"Electrical And Electronic Engineering","url":"https://www.academia.edu/Documents/in/Electrical_And_Electronic_Engineering"},{"id":1372175,"name":"Textural Features","url":"https://www.academia.edu/Documents/in/Textural_Features"},{"id":1991646,"name":"Local Binary Patterns","url":"https://www.academia.edu/Documents/in/Local_Binary_Patterns"},{"id":2571968,"name":"Structural Features","url":"https://www.academia.edu/Documents/in/Structural_Features"}],"urls":[{"id":37229920,"url":"https://onlinelibrary.wiley.com/doi/pdf/10.1049/iet-cvi.2017.0193"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609798"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609798/Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies"><img alt="Research paper thumbnail of Towards improving the future of manufacturing through digital twin and augmented reality technologies" class="work-thumbnail" src="https://attachments.academia-assets.com/109098735/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609798/Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies">Towards improving the future of manufacturing through digital twin and augmented reality technologies</a></div><div class="wp-workCard_item"><span>Procedia Manufacturing</span><span>, 2018</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="154ba7982e0aa20d598f9569be034f28" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{&quot;attachment_id&quot;:109098735,&quot;asset_id&quot;:111609798,&quot;asset_type&quot;:&quot;Work&quot;,&quot;button_location&quot;:&quot;profile&quot;}" href="https://www.academia.edu/attachments/109098735/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" 
style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="111609798"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span><span id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609798"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609798; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609798]").text(description); $(".js-view-count[data-work-id=111609798]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609798; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609798']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609798, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "154ba7982e0aa20d598f9569be034f28" } } $('.js-work-strip[data-work-id=111609798]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609798,"title":"Towards improving the future of manufacturing through digital twin and augmented reality technologies","translated_title":"","metadata":{"publisher":"Elsevier BV","grobid_abstract":"Under the concept of \"Industry 4.0\", production processes will be pushed to be increasingly interconnected, information based on a real time basis and, necessarily, much more efficient. In this context, capacity optimization goes beyond the traditional aim of capacity maximization, contributing also for organization's profitability and value. Indeed, lean management and continuous improvement approaches suggest capacity optimization instead of maximization. The study of capacity optimization and costing models is an important research topic that deserves contributions from both the practical and theoretical perspectives. This paper presents and discusses a mathematical model for capacity management based on different costing models (ABC and TDABC). A generic model has been developed and it was used to analyze idle capacity and to design strategies towards the maximization of organization's value. 
The trade-off capacity maximization vs operational efficiency is highlighted and it is shown that capacity optimization might hide operational inefficiency.","publication_date":{"day":null,"month":null,"year":2018,"errors":{}},"publication_name":"Procedia Manufacturing","grobid_abstract_attachment_id":109098735},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609798/Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies","translated_internal_url":"","created_at":"2023-12-16T23:55:55.015-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098735,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098735/thumbnails/1.jpg","file_name":"LE2I_PROCMANUFACTURING_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098735/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Towards_improving_the_future_of_manufact.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098735/LE2I_PROCMANUFACTURING_2018_ABABSA-libre.pdf?1702800352=\u0026response-content-disposition=attachment%3B+filename%3DTowards_improving_the_future_of_manufact.pdf\u0026Expires=1733201481\u0026Signature=YXGiWDBEdUZ7EHV-yttnMDAFeKUc329CtV9lPWw9d-j75ONsgNC~mNx9Jl6yeADOYaIfqTmVXYBlkxP3VrqxonVDuLFEtr1lRVmD0ZrL4YvY3FfsHIK4EkcUhzs5v1Sb4Bv2h1GFHPSoipMSjfBrJfHJlcXF7bzllkfjEmiLEA2SuB4Tv4YJ6aRPb~-unqVC29W74rjPzHJJn140RYjEDgMG-cQbhVHZtyHmB8w9w~Qd6sIrtW5YAPJm9LhCx1Ckhfqd8xJtvPCkgtM43CWNkaU45tWh93a9NV2Z5Ytqajuoz7tN-cEFCdLtjkrWa9RGH~hcsIsIuuHWzK5GMnkfmw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Towards_improving_the_future_of_manufacturing_through_digital_twin_and_augmented_reality_technologies","translated_slug":"","page_count":9,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098735,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098735/thumbnails/1.jpg","file_name":"LE2I_PROCMANUFACTURING_2018_ABABSA.pdf","download_url":"https://www.academia.edu/attachments/109098735/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"Towards_improving_the_future_of_manufact.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098735/LE2I_PROCMANUFACTURING_2018_ABABSA-libre.pdf?1702800352=\u0026response-content-disposition=attachment%3B+filename%3DTowards_improving_the_future_of_manufact.pdf\u0026Expires=1733201481\u0026Signature=YXGiWDBEdUZ7EHV-yttnMDAFeKUc329CtV9lPWw9d-j75ONsgNC~mNx9Jl6yeADOYaIfqTmVXYBlkxP3VrqxonVDuLFEtr1lRVmD0ZrL4YvY3FfsHIK4EkcUhzs5v1Sb4Bv2h1GFHPSoipMSjfBrJfHJlcXF7bzllkfjEmiLEA2SuB4Tv4YJ6aRPb~-unqVC29W74rjPzHJJn140RYjEDgMG-cQbhVHZtyHmB8w9w~Qd6sIrtW5YAPJm9LhCx1Ckhfqd8xJtvPCkgtM43CWNkaU45tWh93a9NV2Z5Ytqajuoz7tN-cEFCdLtjkrWa9RGH~hcsIsIuuHWzK5GMnkfmw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":26,"name":"Business","url":"https://www.academia.edu/Documents/in/Business"},{"id":5673,"name":"Augmented 
Reality","url":"https://www.academia.edu/Documents/in/Augmented_Reality"},{"id":8910,"name":"Evaluation","url":"https://www.academia.edu/Documents/in/Evaluation"},{"id":61714,"name":"Production economics","url":"https://www.academia.edu/Documents/in/Production_economics"},{"id":66379,"name":"Automation","url":"https://www.academia.edu/Documents/in/Automation"},{"id":85280,"name":"Industry","url":"https://www.academia.edu/Documents/in/Industry"},{"id":167727,"name":"Industrial Revolution","url":"https://www.academia.edu/Documents/in/Industrial_Revolution"},{"id":511079,"name":"Predictive Maintenance","url":"https://www.academia.edu/Documents/in/Predictive_Maintenance"},{"id":2741797,"name":"Digital Twin","url":"https://www.academia.edu/Documents/in/Digital_Twin"}],"urls":[{"id":37229918,"url":"https://api.elsevier.com/content/article/PII:S2351978918311867?httpAccept=text/xml"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="111609797"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/111609797/A_Non_rigid_Face_Tracking_Method_for_Wide_Rotation_Using_Synthetic_Data"><img alt="Research paper thumbnail of A Non-rigid Face Tracking Method for Wide Rotation Using Synthetic Data" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/111609797/A_Non_rigid_Face_Tracking_Method_for_Wide_Rotation_Using_Synthetic_Data">A Non-rigid Face Tracking Method for Wide Rotation Using Synthetic Data</a></div><div class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">This paper propose a new method for wide-rotation non-rigid face tracking that is still a challen...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">This paper propose a new method for wide-rotation non-rigid face tracking that is still a challenging problem in computer vision community. Our method consists of training and tracking phases. In training, we propose to use a large off-line synthetic database to overcome the problem of data collection. The local appearance models are then trained using linear Support Vector Machine (SVM). In tracking, we propose a two-step approach: (i) The first step uses baseline matching for a good initialization. The matching strategy between the current frame and a set of adaptive keyframes is also involved to be recoverable in terms of failed tracking. (ii) The second step estimates the model parameters using a heuristic method via pose-wise SVMs. The combination makes our approach work robustly with wide rotation, up to \(90^{\circ }\) of vertical axis. In addition, our method appears to be robust even in the presence of fast movements thanks to baseline matching. 
A Non-rigid Face Tracking Method for Wide Rotation Using Synthetic Data
Lecture Notes in Computer Science, 2015.

Abstract: This paper proposes a new method for wide-rotation non-rigid face tracking, which is still a challenging problem in the computer vision community. Our method consists of a training phase and a tracking phase. In training, we propose to use a large offline synthetic database to overcome the problem of data collection; the local appearance models are then trained using a linear Support Vector Machine (SVM). In tracking, we propose a two-step approach: (i) the first step uses baseline matching for a good initialization, with a matching strategy between the current frame and a set of adaptive keyframes so that the tracker can recover from failures; (ii) the second step estimates the model parameters using a heuristic method via pose-wise SVMs. The combination makes our approach robust to wide rotation, up to 90° about the vertical axis, and it remains robust in the presence of fast movements thanks to the baseline matching. Compared with state-of-the-art methods, our method shows a good compromise between rigid and non-rigid parameter accuracy. The study gives a promising perspective given the good results in pose estimation (average error below 4° on the BUFT dataset) and landmark tracking precision (5.8 pixel error, against 6.8 for one state-of-the-art method, on the Talking Face video). These results highlight the potential of using synthetic data to track non-rigid faces in unconstrained poses.

Topics: Computer Science; Artificial Intelligence; Computer Vision.
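A detail the abstract emphasizes is training separate, pose-wise linear SVMs for local appearance and selecting the model that matches the current head-pose estimate at tracking time. The sketch below only illustrates that selection idea; the pose bins, descriptors and data are invented, and scikit-learn is an assumed library choice rather than the authors' implementation.

# Hypothetical sketch of pose-wise linear SVMs for local appearance scoring.
import numpy as np
from sklearn.svm import LinearSVC

POSE_BINS = [(-90, -30), (-30, 30), (30, 90)]   # yaw ranges in degrees (assumed)

def bin_of(yaw):
    # Index of the pose bin containing the given yaw estimate.
    for i, (lo, hi) in enumerate(POSE_BINS):
        if lo <= yaw < hi:
            return i
    return len(POSE_BINS) - 1

# Placeholder training data: patch descriptors, landmark/background labels,
# and the synthetic head yaw each patch was rendered at.
rng = np.random.default_rng(1)
X = rng.random((600, 128))
y = rng.integers(0, 2, 600)
yaw = rng.uniform(-90, 90, 600)

posewise_svms = {}
for i, (lo, hi) in enumerate(POSE_BINS):
    mask = (yaw >= lo) & (yaw < hi)
    posewise_svms[i] = LinearSVC().fit(X[mask], y[mask])

# At tracking time, the SVM matching the current yaw estimate scores candidate patches.
current_yaw = 47.0
scores = posewise_svms[bin_of(current_yaw)].decision_function(X[:5])
print(scores)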
id="work-strip-rankings-button-container"></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="111609796"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 111609796; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=111609796]").text(description); $(".js-view-count[data-work-id=111609796]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 111609796; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='111609796']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span><span><script>$(function() { new Works.PaperRankView({ workId: 111609796, container: "", }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "4df3f8ee5842f036fc71a74263db4bd6" } } $('.js-work-strip[data-work-id=111609796]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":111609796,"title":"A Depth-based Approach for 3D Dynamic Gesture Recognition","translated_title":"","metadata":{"publisher":"SCITEPRESS - Science and and Technology Publications","grobid_abstract":"In this paper we propose a recognition technique of 3D dynamic gesture for human robot interaction (HRI) based on depth information provided by Kinect sensor. The body is tracked using the skeleton algorithm provided by the Kinect SDK. The main idea of this work is to compute the angles of the upper body joints which are active when executing gesture. The variation of these angles are used as inputs of Hidden Markov Models (HMM) in order to recognize the dynamic gestures. 
Results demonstrate the robustness of our method against environmental conditions such as illumination changes and scene complexity due to using depth information only.","publication_date":{"day":null,"month":null,"year":2015,"errors":{}},"publication_name":"Proceedings of the 12th International Conference on Informatics in Control, Automation and Robotics","grobid_abstract_attachment_id":109098734},"translated_abstract":null,"internal_url":"https://www.academia.edu/111609796/A_Depth_based_Approach_for_3D_Dynamic_Gesture_Recognition","translated_internal_url":"","created_at":"2023-12-16T23:55:54.710-08:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":263837202,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[],"downloadable_attachments":[{"id":109098734,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098734/thumbnails/1.jpg","file_name":"Hajar_HIYADI_ICINCO.pdf","download_url":"https://www.academia.edu/attachments/109098734/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"A_Depth_based_Approach_for_3D_Dynamic_Ge.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098734/Hajar_HIYADI_ICINCO-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DA_Depth_based_Approach_for_3D_Dynamic_Ge.pdf\u0026Expires=1733201481\u0026Signature=L~kPLqozZvyb5RVLtdAXeSwUNTUjErE3ait5vWmWdT0LqCduGPcCWBF~JGJh~U5KGe-iYaBkRFIVmgDY93puOGZxISqotzbs3lfIqPi7BFbYj-coOlhow6M0Bh~xUBAF154hwQLcsbyxzEk1zSNcxI0MIOQ4YPk0Lj5xzeoIzzjLsV1JhIPGb6B9VXFr1QeQmZCmB5ggYDJiTVlAdOoiEIBY0Pp9ddDnMUQUr-e3DnO-hCPjqXfr9p-40QPV9hHOPMnU~pSozFS7F7cwWbiBe8U6dLcdn7w8rQeWBhfJAgS8EI9P2NWnHBMpUSM4mcQ-RtnQmLJULyiIamzkEx-lxw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"A_Depth_based_Approach_for_3D_Dynamic_Gesture_Recognition","translated_slug":"","page_count":10,"language":"en","content_type":"Work","owner":{"id":263837202,"first_name":"Fakhreddine","middle_initials":null,"last_name":"Ababsa","page_name":"FakhreddineAbabsa","domain_name":"gadz","created_at":"2023-03-30T04:13:56.773-07:00","display_name":"Fakhreddine Ababsa","url":"https://gadz.academia.edu/FakhreddineAbabsa"},"attachments":[{"id":109098734,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/109098734/thumbnails/1.jpg","file_name":"Hajar_HIYADI_ICINCO.pdf","download_url":"https://www.academia.edu/attachments/109098734/download_file?st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&st=MTczMzE5Nzg4MSw4LjIyMi4yMDguMTQ2&","bulk_download_file_name":"A_Depth_based_Approach_for_3D_Dynamic_Ge.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/109098734/Hajar_HIYADI_ICINCO-libre.pdf?1702800346=\u0026response-content-disposition=attachment%3B+filename%3DA_Depth_based_Approach_for_3D_Dynamic_Ge.pdf\u0026Expires=1733201481\u0026Signature=L~kPLqozZvyb5RVLtdAXeSwUNTUjErE3ait5vWmWdT0LqCduGPcCWBF~JGJh~U5KGe-iYaBkRFIVmgDY93puOGZxISqotzbs3lfIqPi7BFbYj-coOlhow6M0Bh~xUBAF154hwQLcsbyxzEk1zSNcxI0MIOQ4YPk0Lj5xzeoIzzjLsV1JhIPGb6B9VXFr1QeQmZCmB5ggYDJiTVlAdOoiEIBY0Pp9ddDnMUQUr-e3DnO-hCPjqXfr9p-40QPV9hHOPMnU~pSozFS7F7cwWbiBe8U6dLcdn7w8rQeWBhfJAgS8EI9P2NWnHBMpUSM4mcQ-RtnQmLJULyiIamzkEx-lxw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":422,"name":"Computer Science","url":"https://www.academia.edu/Documents/in/Computer_Science"},{"id":465,"name":"Artificial 
Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":854,"name":"Computer Vision","url":"https://www.academia.edu/Documents/in/Computer_Vision"},{"id":3147,"name":"Gesture","url":"https://www.academia.edu/Documents/in/Gesture"},{"id":17701,"name":"Gesture Recognition","url":"https://www.academia.edu/Documents/in/Gesture_Recognition"},{"id":143539,"name":"hidden Markov model","url":"https://www.academia.edu/Documents/in/hidden_Markov_model"},{"id":210122,"name":"Robustness (evolution)","url":"https://www.academia.edu/Documents/in/Robustness_evolution_"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js","https://a.academia-assets.com/assets/google_contacts-0dfb882d836b94dbcb4a2d123d6933fc9533eda5be911641f20b4eb428429600.js"], function() { // from javascript_helper.rb $('.js-google-connect-button').click(function(e) { e.preventDefault(); GoogleContacts.authorize_and_show_contacts(); Aedu.Dismissibles.recordClickthrough("WowProfileImportContactsPrompt"); }); $('.js-update-biography-button').click(function(e) { e.preventDefault(); Aedu.Dismissibles.recordClickthrough("UpdateUserBiographyPrompt"); $.ajax({ url: $r.api_v0_profiles_update_about_path({ subdomain_param: 'api', about: "", }), type: 'PUT', success: function(response) { location.reload(); } }); }); $('.js-work-creator-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_document_path({ source: encodeURIComponent(""), }); }); $('.js-video-upload-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_video_path({ source: encodeURIComponent(""), }); }); $('.js-do-this-later-button').click(function() { $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("WowProfileImportContactsPrompt"); }); $('.js-update-biography-do-this-later-button').click(function(){ $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("UpdateUserBiographyPrompt"); }); $('.wow-profile-mentions-upsell--close').click(function(){ $('.wow-profile-mentions-upsell--panel').hide(); Aedu.Dismissibles.recordDismissal("WowProfileMentionsUpsell"); }); $('.wow-profile-mentions-upsell--button').click(function(){ Aedu.Dismissibles.recordClickthrough("WowProfileMentionsUpsell"); }); new WowProfile.SocialRedesignUserWorks({ initialWorksOffset: 20, allWorksOffset: 20, maxSections: 1 }) }); </script> </div></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile_edit-5ea339ee107c863779f560dd7275595239fed73f1a13d279d2b599a28c0ecd33.js","https://a.academia-assets.com/assets/add_coauthor-22174b608f9cb871d03443cafa7feac496fb50d7df2d66a53f5ee3c04ba67f53.js","https://a.academia-assets.com/assets/tab-dcac0130902f0cc2d8cb403714dd47454f11fc6fb0e99ae6a0827b06613abc20.js","https://a.academia-assets.com/assets/wow_profile-f77ea15d77ce96025a6048a514272ad8becbad23c641fc2b3bd6e24ca6ff1932.js"], function() { // from javascript_helper.rb window.ae = window.ae || {}; window.ae.WowProfile = window.ae.WowProfile || {}; if(Aedu.User.current && Aedu.User.current.id === $viewedUser.id) { window.ae.WowProfile.current_user_edit = {}; new WowProfileEdit.EditUploadView({ el: 