CINXE.COM
Haris Dindo - Academia.edu
<!DOCTYPE html> <html lang="en" xmlns:fb="http://www.facebook.com/2008/fbml" class="wf-loading"> <head prefix="og: https://ogp.me/ns# fb: https://ogp.me/ns/fb# academia: https://ogp.me/ns/fb/academia#"> <meta charset="utf-8"> <meta name=viewport content="width=device-width, initial-scale=1"> <meta rel="search" type="application/opensearchdescription+xml" href="/open_search.xml" title="Academia.edu"> <title>Haris Dindo - Academia.edu</title> <!-- _ _ _ | | (_) | | __ _ ___ __ _ __| | ___ _ __ ___ _ __ _ ___ __| |_ _ / _` |/ __/ _` |/ _` |/ _ \ '_ ` _ \| |/ _` | / _ \/ _` | | | | | (_| | (_| (_| | (_| | __/ | | | | | | (_| || __/ (_| | |_| | \__,_|\___\__,_|\__,_|\___|_| |_| |_|_|\__,_(_)___|\__,_|\__,_| We're hiring! See https://www.academia.edu/hiring --> <link href="//a.academia-assets.com/images/favicons/favicon-production.ico" rel="shortcut icon" type="image/vnd.microsoft.icon"> <link rel="apple-touch-icon" sizes="57x57" href="//a.academia-assets.com/images/favicons/apple-touch-icon-57x57.png"> <link rel="apple-touch-icon" sizes="60x60" href="//a.academia-assets.com/images/favicons/apple-touch-icon-60x60.png"> <link rel="apple-touch-icon" sizes="72x72" href="//a.academia-assets.com/images/favicons/apple-touch-icon-72x72.png"> <link rel="apple-touch-icon" sizes="76x76" href="//a.academia-assets.com/images/favicons/apple-touch-icon-76x76.png"> <link rel="apple-touch-icon" sizes="114x114" href="//a.academia-assets.com/images/favicons/apple-touch-icon-114x114.png"> <link rel="apple-touch-icon" sizes="120x120" href="//a.academia-assets.com/images/favicons/apple-touch-icon-120x120.png"> <link rel="apple-touch-icon" sizes="144x144" href="//a.academia-assets.com/images/favicons/apple-touch-icon-144x144.png"> <link rel="apple-touch-icon" sizes="152x152" href="//a.academia-assets.com/images/favicons/apple-touch-icon-152x152.png"> <link rel="apple-touch-icon" sizes="180x180" href="//a.academia-assets.com/images/favicons/apple-touch-icon-180x180.png"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-32x32.png" sizes="32x32"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-194x194.png" sizes="194x194"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-96x96.png" sizes="96x96"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/android-chrome-192x192.png" sizes="192x192"> <link rel="icon" type="image/png" href="//a.academia-assets.com/images/favicons/favicon-16x16.png" sizes="16x16"> <link rel="manifest" href="//a.academia-assets.com/images/favicons/manifest.json"> <meta name="msapplication-TileColor" content="#2b5797"> <meta name="msapplication-TileImage" content="//a.academia-assets.com/images/favicons/mstile-144x144.png"> <meta name="theme-color" content="#ffffff"> <script> window.performance && window.performance.measure && window.performance.measure("Time To First Byte", "requestStart", "responseStart"); </script> <script> (function() { if (!window.URLSearchParams || !window.history || !window.history.replaceState) { return; } var searchParams = new URLSearchParams(window.location.search); var paramsToDelete = [ 'fs', 'sm', 'swp', 'iid', 'nbs', 'rcc', // related content category 'rcpos', // related content carousel position 'rcpg', // related carousel page 'rchid', // related content hit id 'f_ri', // research interest id, for SEO tracking 'f_fri', // featured research interest, for SEO tracking (param key without value) 'f_rid', // from research interest directory for SEO tracking 'f_loswp', // from research interest pills on LOSWP sidebar for SEO tracking 'rhid', // referrring hit id ]; if (paramsToDelete.every((key) => searchParams.get(key) === null)) { return; } paramsToDelete.forEach((key) => { searchParams.delete(key); }); var cleanUrl = new URL(window.location.href); cleanUrl.search = searchParams.toString(); history.replaceState({}, document.title, cleanUrl); })(); </script> <script async src="https://www.googletagmanager.com/gtag/js?id=G-5VKX33P2DS"></script> <script> window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'G-5VKX33P2DS', { cookie_domain: 'academia.edu', send_page_view: false, }); gtag('event', 'page_view', { 'controller': "profiles/works", 'action': "summary", 'controller_action': 'profiles/works#summary', 'logged_in': 'false', 'edge': 'unknown', // Send nil if there is no A/B test bucket, in case some records get logged // with missing data - that way we can distinguish between the two cases. // ab_test_bucket should be of the form <ab_test_name>:<bucket> 'ab_test_bucket': null, }) </script> <script type="text/javascript"> window.sendUserTiming = function(timingName) { if (!(window.performance && window.performance.measure)) return; var entries = window.performance.getEntriesByName(timingName, "measure"); if (entries.length !== 1) return; var timingValue = Math.round(entries[0].duration); gtag('event', 'timing_complete', { name: timingName, value: timingValue, event_category: 'User-centric', }); }; window.sendUserTiming("Time To First Byte"); </script> <meta name="csrf-param" content="authenticity_token" /> <meta name="csrf-token" content="ar2bwP--ypVVwEbRrPtJqZBH9qvHUAmNH6_EhdRJ6F4GLQXns13g4iX_pQSiRAQgaDbniDSdL4-FinabeVxW4g" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/wow-3d36c19b4875b226bfed0fcba1dcea3f2fe61148383d97c0465c016b8c969290.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/social/home-79e78ce59bef0a338eb6540ec3d93b4a7952115b56c57f1760943128f4544d42.css" /><script type="application/ld+json">{"@context":"https://schema.org","@type":"ProfilePage","mainEntity":{"@context":"https://schema.org","@type":"Person","name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo","sameAs":[]},"dateCreated":"2015-10-28T16:44:45-07:00","dateModified":"2022-06-29T00:35:10-07:00","name":"Haris Dindo","description":"","sameAs":[],"relatedLink":"https://www.academia.edu/20159382/Bounded_Seed_AGI"}</script><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/heading-95367dc03b794f6737f30123738a886cf53b7a65cdef98a922a98591d60063e3.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/button-8c9ae4b5c8a2531640c354d92a1f3579c8ff103277ef74913e34c8a76d4e6c00.css" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/body-170d1319f0e354621e81ca17054bb147da2856ec0702fe440a99af314a6338c5.css" /><link crossorigin="" href="https://fonts.gstatic.com/" rel="preconnect" /><link href="https://fonts.googleapis.com/css2?family=DM+Sans:ital,opsz,wght@0,9..40,100..1000;1,9..40,100..1000&family=Gupter:wght@400;500;700&family=IBM+Plex+Mono:wght@300;400&family=Material+Symbols+Outlined:opsz,wght,FILL,GRAD@20,400,0,0&display=swap" rel="stylesheet" /><link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system/common-2b6f90dbd75f5941bc38f4ad716615f3ac449e7398313bb3bc225fba451cd9fa.css" /> <meta name="author" content="haris dindo" /> <meta name="description" content="Haris Dindo: 33 Followers, 17 Following, 78 Research papers. Research interests: Fractal, Human Gait Analysis, and Dynamical systems and Chaos." /> <meta name="google-site-verification" content="bKJMBZA7E43xhDOopFZkssMMkBRjvYERV-NaN4R6mrs" /> <script> var $controller_name = 'works'; var $action_name = "summary"; var $rails_env = 'production'; var $app_rev = '8bb59e8ce028f234ef678358ae60ce6b16b0b6f4'; var $domain = 'academia.edu'; var $app_host = "academia.edu"; var $asset_host = "academia-assets.com"; var $start_time = new Date().getTime(); var $recaptcha_key = "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB"; var $recaptcha_invisible_key = "6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj"; var $disableClientRecordHit = false; </script> <script> window.Aedu = { hit_data: null }; window.Aedu.SiteStats = {"premium_universities_count":13994,"monthly_visitors":"114 million","monthly_visitor_count":114517624,"monthly_visitor_count_in_millions":114,"user_count":284180556,"paper_count":55203019,"paper_count_in_millions":55,"page_count":432000000,"page_count_in_millions":432,"pdf_count":16500000,"pdf_count_in_millions":16}; window.Aedu.serverRenderTime = new Date(1741083864000); window.Aedu.timeDifference = new Date().getTime() - 1741083864000; window.Aedu.isUsingCssV1 = false; window.Aedu.enableLocalization = true; window.Aedu.activateFullstory = false; window.Aedu.serviceAvailability = { status: {"attention_db":"on","bibliography_db":"on","contacts_db":"on","email_db":"on","indexability_db":"on","mentions_db":"on","news_db":"on","notifications_db":"on","offsite_mentions_db":"on","redshift":"on","redshift_exports_db":"on","related_works_db":"on","ring_db":"on","user_tests_db":"on"}, serviceEnabled: function(service) { return this.status[service] === "on"; }, readEnabled: function(service) { return this.serviceEnabled(service) || this.status[service] === "read_only"; }, }; window.Aedu.viewApmTrace = function() { // Check if x-apm-trace-id meta tag is set, and open the trace in APM // in a new window if it is. var apmTraceId = document.head.querySelector('meta[name="x-apm-trace-id"]'); if (apmTraceId) { var traceId = apmTraceId.content; // Use trace ID to construct URL, an example URL looks like: // https://app.datadoghq.com/apm/traces?query=trace_id%31298410148923562634 var apmUrl = 'https://app.datadoghq.com/apm/traces?query=trace_id%3A' + traceId; window.open(apmUrl, '_blank'); } }; </script> <!--[if lt IE 9]> <script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.2/html5shiv.min.js"></script> <![endif]--> <link href="https://fonts.googleapis.com/css?family=Roboto:100,100i,300,300i,400,400i,500,500i,700,700i,900,900i" rel="stylesheet"> <link rel="preload" href="//maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css" as="style" onload="this.rel='stylesheet'"> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/libraries-a9675dcb01ec4ef6aa807ba772c7a5a00c1820d3ff661c1038a20f80d06bb4e4.css" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/academia-1eb081e01ca8bc0c1b1d866df79d9eb4dd2c484e4beecf76e79a7806c72fee08.css" /> <link rel="stylesheet" media="all" href="//a.academia-assets.com/assets/design_system_legacy-056a9113b9a0f5343d013b29ee1929d5a18be35fdcdceb616600b4db8bd20054.css" /> <script src="//a.academia-assets.com/assets/webpack_bundles/runtime-bundle-005434038af4252ca37c527588411a3d6a0eabb5f727fac83f8bbe7fd88d93bb.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/webpack_libraries_and_infrequently_changed.wjs-bundle-803e71d0b85992fbb0dfd37651b4efefde12a19c48ab47f5a49414891d1ab4d4.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/core_webpack.wjs-bundle-53737366d2690238229ab1eddd7d1618e8a89d97bf6767ded339b2668277487a.js"></script> <script src="//a.academia-assets.com/assets/webpack_bundles/sentry.wjs-bundle-5fe03fddca915c8ba0f7edbe64c194308e8ce5abaed7bffe1255ff37549c4808.js"></script> <script> jade = window.jade || {}; jade.helpers = window.$h; jade._ = window._; </script> <!-- Google Tag Manager --> <script id="tag-manager-head-root">(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0], j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src= 'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f); })(window,document,'script','dataLayer_old','GTM-5G9JF7Z');</script> <!-- End Google Tag Manager --> <script> window.gptadslots = []; window.googletag = window.googletag || {}; window.googletag.cmd = window.googletag.cmd || []; </script> <script type="text/javascript"> // TODO(jacob): This should be defined, may be rare load order problem. // Checking if null is just a quick fix, will default to en if unset. // Better fix is to run this immedietely after I18n is set. if (window.I18n != null) { I18n.defaultLocale = "en"; I18n.locale = "en"; I18n.fallbacks = true; } </script> <link rel="canonical" href="https://independent.academia.edu/HarisDindo" /> </head> <!--[if gte IE 9 ]> <body class='ie ie9 c-profiles/works a-summary logged_out'> <![endif]--> <!--[if !(IE) ]><!--> <body class='c-profiles/works a-summary logged_out'> <!--<![endif]--> <div id="fb-root"></div><script>window.fbAsyncInit = function() { FB.init({ appId: "2369844204", version: "v8.0", status: true, cookie: true, xfbml: true }); // Additional initialization code. if (window.InitFacebook) { // facebook.ts already loaded, set it up. window.InitFacebook(); } else { // Set a flag for facebook.ts to find when it loads. window.academiaAuthReadyFacebook = true; } };</script><script>window.fbAsyncLoad = function() { // Protection against double calling of this function if (window.FB) { return; } (function(d, s, id){ var js, fjs = d.getElementsByTagName(s)[0]; if (d.getElementById(id)) {return;} js = d.createElement(s); js.id = id; js.src = "//connect.facebook.net/en_US/sdk.js"; fjs.parentNode.insertBefore(js, fjs); }(document, 'script', 'facebook-jssdk')); } if (!window.defer_facebook) { // Autoload if not deferred window.fbAsyncLoad(); } else { // Defer loading by 5 seconds setTimeout(function() { window.fbAsyncLoad(); }, 5000); }</script> <div id="google-root"></div><script>window.loadGoogle = function() { if (window.InitGoogle) { // google.ts already loaded, set it up. window.InitGoogle("331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"); } else { // Set a flag for google.ts to use when it loads. window.GoogleClientID = "331998490334-rsn3chp12mbkiqhl6e7lu2q0mlbu0f1b"; } };</script><script>window.googleAsyncLoad = function() { // Protection against double calling of this function (function(d) { var js; var id = 'google-jssdk'; var ref = d.getElementsByTagName('script')[0]; if (d.getElementById(id)) { return; } js = d.createElement('script'); js.id = id; js.async = true; js.onload = loadGoogle; js.src = "https://accounts.google.com/gsi/client" ref.parentNode.insertBefore(js, ref); }(document)); } if (!window.defer_google) { // Autoload if not deferred window.googleAsyncLoad(); } else { // Defer loading by 5 seconds setTimeout(function() { window.googleAsyncLoad(); }, 5000); }</script> <div id="tag-manager-body-root"> <!-- Google Tag Manager (noscript) --> <noscript><iframe src="https://www.googletagmanager.com/ns.html?id=GTM-5G9JF7Z" height="0" width="0" style="display:none;visibility:hidden"></iframe></noscript> <!-- End Google Tag Manager (noscript) --> <!-- Event listeners for analytics --> <script> window.addEventListener('load', function() { if (document.querySelector('input[name="commit"]')) { document.querySelector('input[name="commit"]').addEventListener('click', function() { gtag('event', 'click', { event_category: 'button', event_label: 'Log In' }) }) } }); </script> </div> <script>var _comscore = _comscore || []; _comscore.push({ c1: "2", c2: "26766707" }); (function() { var s = document.createElement("script"), el = document.getElementsByTagName("script")[0]; s.async = true; s.src = (document.location.protocol == "https:" ? "https://sb" : "http://b") + ".scorecardresearch.com/beacon.js"; el.parentNode.insertBefore(s, el); })();</script><img src="https://sb.scorecardresearch.com/p?c1=2&c2=26766707&cv=2.0&cj=1" style="position: absolute; visibility: hidden" /> <div id='react-modal'></div> <div class='DesignSystem'> <a class='u-showOnFocus' href='#site'> Skip to main content </a> </div> <div id="upgrade_ie_banner" style="display: none;"><p>Academia.edu no longer supports Internet Explorer.</p><p>To browse Academia.edu and the wider internet faster and more securely, please take a few seconds to <a href="https://www.academia.edu/upgrade-browser">upgrade your browser</a>.</p></div><script>// Show this banner for all versions of IE if (!!window.MSInputMethodContext || /(MSIE)/.test(navigator.userAgent)) { document.getElementById('upgrade_ie_banner').style.display = 'block'; }</script> <div class="DesignSystem bootstrap ShrinkableNav"><div class="navbar navbar-default main-header"><div class="container-wrapper" id="main-header-container"><div class="container"><div class="navbar-header"><div class="nav-left-wrapper u-mt0x"><div class="nav-logo"><a data-main-header-link-target="logo_home" href="https://www.academia.edu/"><img class="visible-xs-inline-block" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015-A.svg" width="24" height="24" /><img width="145.2" height="18" class="hidden-xs" style="height: 24px;" alt="Academia.edu" src="//a.academia-assets.com/images/academia-logo-redesign-2015.svg" /></a></div><div class="nav-search"><div class="SiteSearch-wrapper select2-no-default-pills"><form class="js-SiteSearch-form DesignSystem" action="https://www.academia.edu/search" accept-charset="UTF-8" method="get"><i class="SiteSearch-icon fa fa-search u-fw700 u-positionAbsolute u-tcGrayDark"></i><input class="js-SiteSearch-form-input SiteSearch-form-input form-control" data-main-header-click-target="search_input" name="q" placeholder="Search" type="text" value="" /></form></div></div></div><div class="nav-right-wrapper pull-right"><ul class="NavLinks js-main-nav list-unstyled"><li class="NavLinks-link"><a class="js-header-login-url Button Button--inverseGray Button--sm u-mb4x" id="nav_log_in" rel="nofollow" href="https://www.academia.edu/login">Log In</a></li><li class="NavLinks-link u-p0x"><a class="Button Button--inverseGray Button--sm u-mb4x" rel="nofollow" href="https://www.academia.edu/signup">Sign Up</a></li></ul><button class="hidden-lg hidden-md hidden-sm u-ml4x navbar-toggle collapsed" data-target=".js-mobile-header-links" data-toggle="collapse" type="button"><span class="icon-bar"></span><span class="icon-bar"></span><span class="icon-bar"></span></button></div></div><div class="collapse navbar-collapse js-mobile-header-links"><ul class="nav navbar-nav"><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/login">Log In</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/signup">Sign Up</a></li><li class="u-borderColorGrayLight u-borderBottom1 js-mobile-nav-expand-trigger"><a href="#">more <span class="caret"></span></a></li><li><ul class="js-mobile-nav-expand-section nav navbar-nav u-m0x collapse"><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/about">About</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/press">Press</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="false" href="https://www.academia.edu/documents">Papers</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://www.academia.edu/hiring"><i class="fa fa-briefcase"></i> We're Hiring!</a></li><li class="u-borderColorGrayLight u-borderBottom1"><a rel="nofollow" href="https://support.academia.edu/hc/en-us"><i class="fa fa-question-circle"></i> Help Center</a></li><li class="js-mobile-nav-collapse-trigger u-borderColorGrayLight u-borderBottom1 dropup" style="display:none"><a href="#">less <span class="caret"></span></a></li></ul></li></ul></div></div></div><script>(function(){ var $moreLink = $(".js-mobile-nav-expand-trigger"); var $lessLink = $(".js-mobile-nav-collapse-trigger"); var $section = $('.js-mobile-nav-expand-section'); $moreLink.click(function(ev){ ev.preventDefault(); $moreLink.hide(); $lessLink.show(); $section.collapse('show'); }); $lessLink.click(function(ev){ ev.preventDefault(); $moreLink.show(); $lessLink.hide(); $section.collapse('hide'); }); })() if ($a.is_logged_in() || false) { new Aedu.NavigationController({ el: '.js-main-nav', showHighlightedNotification: false }); } else { $(".js-header-login-url").attr("href", $a.loginUrlWithRedirect()); } Aedu.autocompleteSearch = new AutocompleteSearch({el: '.js-SiteSearch-form'});</script></div></div> <div id='site' class='fixed'> <div id="content" class="clearfix"> <script>document.addEventListener('DOMContentLoaded', function(){ var $dismissible = $(".dismissible_banner"); $dismissible.click(function(ev) { $dismissible.hide(); }); });</script> <script src="//a.academia-assets.com/assets/webpack_bundles/profile.wjs-bundle-88442fa99c53695cbf56e2564b19eb3b40814aff3697cc34cdd3c1c7f740e0a7.js" defer="defer"></script><script>$viewedUser = Aedu.User.set_viewed( {"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo","photo":"/images/s65_no_pic.png","has_photo":false,"is_analytics_public":false,"interests":[{"id":172625,"name":"Fractal","url":"https://www.academia.edu/Documents/in/Fractal"},{"id":58648,"name":"Human Gait Analysis","url":"https://www.academia.edu/Documents/in/Human_Gait_Analysis"},{"id":23922,"name":"Dynamical systems and Chaos","url":"https://www.academia.edu/Documents/in/Dynamical_systems_and_Chaos"},{"id":1003619,"name":"Time Series Analysis and Forecasting","url":"https://www.academia.edu/Documents/in/Time_Series_Analysis_and_Forecasting"},{"id":37229,"name":"Human Movement Science","url":"https://www.academia.edu/Documents/in/Human_Movement_Science"},{"id":11136,"name":"Cognitive Robotics","url":"https://www.academia.edu/Documents/in/Cognitive_Robotics"},{"id":181785,"name":"Time Series Data","url":"https://www.academia.edu/Documents/in/Time_Series_Data"}]} ); if ($a.is_logged_in() && $viewedUser.is_current_user()) { $('body').addClass('profile-viewed-by-owner'); } $socialProfiles = []</script><div id="js-react-on-rails-context" style="display:none" data-rails-context="{"inMailer":false,"i18nLocale":"en","i18nDefaultLocale":"en","href":"https://independent.academia.edu/HarisDindo","location":"/HarisDindo","scheme":"https","host":"independent.academia.edu","port":null,"pathname":"/HarisDindo","search":null,"httpAcceptLanguage":null,"serverSide":false}"></div> <div class="js-react-on-rails-component" style="display:none" data-component-name="ProfileCheckPaperUpdate" data-props="{}" data-trace="false" data-dom-id="ProfileCheckPaperUpdate-react-component-5f7e311c-00b8-4065-9526-95199d4a525c"></div> <div id="ProfileCheckPaperUpdate-react-component-5f7e311c-00b8-4065-9526-95199d4a525c"></div> <div class="DesignSystem"><div class="onsite-ping" id="onsite-ping"></div></div><div class="profile-user-info DesignSystem"><div class="social-profile-container"><div class="left-panel-container"><div class="user-info-component-wrapper"><div class="user-summary-cta-container"><div class="user-summary-container"><div class="social-profile-avatar-container"><img class="profile-avatar u-positionAbsolute" border="0" alt="" src="//a.academia-assets.com/images/s200_no_pic.png" /></div><div class="title-container"><h1 class="ds2-5-heading-sans-serif-sm">Haris Dindo</h1><div class="affiliations-container fake-truncate js-profile-affiliations"></div></div></div><div class="sidebar-cta-container"><button class="ds2-5-button hidden profile-cta-button grow js-profile-follow-button" data-broccoli-component="user-info.follow-button" data-click-track="profile-user-info-follow-button" data-follow-user-fname="Haris" data-follow-user-id="37173809" data-follow-user-source="profile_button" data-has-google="false"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">add</span>Follow</button><button class="ds2-5-button hidden profile-cta-button grow js-profile-unfollow-button" data-broccoli-component="user-info.unfollow-button" data-click-track="profile-user-info-unfollow-button" data-unfollow-user-id="37173809"><span class="material-symbols-outlined" style="font-size: 20px" translate="no">done</span>Following</button></div></div><div class="user-stats-container"><a><div class="stat-container js-profile-followers"><p class="label">Followers</p><p class="data">33</p></div></a><a><div class="stat-container js-profile-followees" data-broccoli-component="user-info.followees-count" data-click-track="profile-expand-user-info-following"><p class="label">Following</p><p class="data">17</p></div></a><a><div class="stat-container js-profile-coauthors" data-broccoli-component="user-info.coauthors-count" data-click-track="profile-expand-user-info-coauthors"><p class="label">Co-authors</p><p class="data">15</p></div></a><div class="js-mentions-count-container" style="display: none;"><a href="/HarisDindo/mentions"><div class="stat-container"><p class="label">Mentions</p><p class="data"></p></div></a></div><span><div class="stat-container"><p class="label"><span class="js-profile-total-view-text">Public Views</span></p><p class="data"><span class="js-profile-view-count"></span></p></div></span></div><div class="suggested-academics-container"><div class="suggested-academics--header"><p class="ds2-5-body-md-bold">Related Authors</p></div><ul class="suggested-user-card-list"><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://case.academia.edu/AdamCroom"><img class="profile-avatar u-positionAbsolute" alt="Adam M Croom" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/145416/38521/146815278/s200_adam.croom.png" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://case.academia.edu/AdamCroom">Adam M Croom</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Case Western Reserve University</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://rug.academia.edu/RaoulBongers"><img class="profile-avatar u-positionAbsolute" alt="Raoul Bongers" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/334595/92725/102337/s200_raoul.bongers.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://rug.academia.edu/RaoulBongers">Raoul Bongers</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">University of Groningen</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://oxfordbrookes.academia.edu/FabioCuzzolin"><img class="profile-avatar u-positionAbsolute" alt="Fabio Cuzzolin" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/366407/112374/61740579/s200_fabio.cuzzolin.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://oxfordbrookes.academia.edu/FabioCuzzolin">Fabio Cuzzolin</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Oxford Brookes University</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://unsw.academia.edu/MassimilianoCappuccio"><img class="profile-avatar u-positionAbsolute" alt="Massimiliano L Cappuccio" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/375944/116197/169667/s200_massimiliano.cappuccio.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://unsw.academia.edu/MassimilianoCappuccio">Massimiliano L Cappuccio</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">The University of New South Wales</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://uaar.academia.edu/AhmedImranHunjra"><img class="profile-avatar u-positionAbsolute" alt="Ahmed Imran Hunjra" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/598202/209871/245081/s200_ahmed_imran.hunjra.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://uaar.academia.edu/AhmedImranHunjra">Ahmed Imran Hunjra</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Arid Agriculture University Rawalpindi</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://enah.academia.edu/FernandoL%C3%B3pezAguilar"><img class="profile-avatar u-positionAbsolute" alt="Fernando López-Aguilar" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/663787/231611/271216/s200_fernando.l_pez_aguilar.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://enah.academia.edu/FernandoL%C3%B3pezAguilar">Fernando López-Aguilar</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Escuela Nacional de Antropologia e Historia</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://umcg.academia.edu/MarinaMSchoemaker"><img class="profile-avatar u-positionAbsolute" border="0" alt="" src="//a.academia-assets.com/images/s200_no_pic.png" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://umcg.academia.edu/MarinaMSchoemaker">Marina M. Schoemaker</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">University Medical Center Groningen</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://suleyman-demirel.academia.edu/Ziya%C3%96NC%C3%9C"><img class="profile-avatar u-positionAbsolute" border="0" alt="" src="//a.academia-assets.com/images/s200_no_pic.png" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://suleyman-demirel.academia.edu/Ziya%C3%96NC%C3%9C">Ziya ÖNCÜ</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Suleyman Demirel University</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://su-se.academia.edu/ViacheslavKuleshov"><img class="profile-avatar u-positionAbsolute" alt="Viacheslav Kuleshov" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/1895779/1756649/167070895/s200_viacheslav.kuleshov.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://su-se.academia.edu/ViacheslavKuleshov">Viacheslav Kuleshov</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Stockholm University</p></div></div><div class="suggested-user-card"><div class="suggested-user-card__avatar social-profile-avatar-container"><a href="https://bhu-in.academia.edu/SUNJAYSUNJAY"><img class="profile-avatar u-positionAbsolute" alt="SUNJAY SUNJAY" border="0" onerror="if (this.src != '//a.academia-assets.com/images/s200_no_pic.png') this.src = '//a.academia-assets.com/images/s200_no_pic.png';" width="200" height="200" src="https://0.academia-photos.com/3178997/1045493/1305890/s200_sunjay.sunjay.jpg" /></a></div><div class="suggested-user-card__user-info"><a class="suggested-user-card__user-info__header ds2-5-body-sm-bold ds2-5-body-link" href="https://bhu-in.academia.edu/SUNJAYSUNJAY">SUNJAY SUNJAY</a><p class="suggested-user-card__user-info__subheader ds2-5-body-xs">Banaras Hindu University, Varanasi</p></div></div></ul></div><div class="ri-section"><div class="ri-section-header"><span>Interests</span><a class="ri-more-link js-profile-ri-list-card" data-click-track="profile-user-info-primary-research-interest" data-has-card-for-ri-list="37173809">View All (7)</a></div><div class="ri-tags-container"><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="37173809" href="https://www.academia.edu/Documents/in/Fractal"><div id="js-react-on-rails-context" style="display:none" data-rails-context="{"inMailer":false,"i18nLocale":"en","i18nDefaultLocale":"en","href":"https://independent.academia.edu/HarisDindo","location":"/HarisDindo","scheme":"https","host":"independent.academia.edu","port":null,"pathname":"/HarisDindo","search":null,"httpAcceptLanguage":null,"serverSide":false}"></div> <div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{"color":"gray","children":["Fractal"]}" data-trace="false" data-dom-id="Pill-react-component-11ac9e73-9eae-466f-b357-4ad1cca16b45"></div> <div id="Pill-react-component-11ac9e73-9eae-466f-b357-4ad1cca16b45"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="37173809" href="https://www.academia.edu/Documents/in/Human_Gait_Analysis"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{"color":"gray","children":["Human Gait Analysis"]}" data-trace="false" data-dom-id="Pill-react-component-efdfbbeb-724f-4de5-94fe-f6f59073cee1"></div> <div id="Pill-react-component-efdfbbeb-724f-4de5-94fe-f6f59073cee1"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="37173809" href="https://www.academia.edu/Documents/in/Dynamical_systems_and_Chaos"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{"color":"gray","children":["Dynamical systems and Chaos"]}" data-trace="false" data-dom-id="Pill-react-component-fb10d3ca-3c3c-4546-8424-f6e31cbcc823"></div> <div id="Pill-react-component-fb10d3ca-3c3c-4546-8424-f6e31cbcc823"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="37173809" href="https://www.academia.edu/Documents/in/Time_Series_Analysis_and_Forecasting"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{"color":"gray","children":["Time Series Analysis and Forecasting"]}" data-trace="false" data-dom-id="Pill-react-component-6cdf3644-9158-40e5-8907-7fa1809cc2f0"></div> <div id="Pill-react-component-6cdf3644-9158-40e5-8907-7fa1809cc2f0"></div> </a><a data-click-track="profile-user-info-expand-research-interests" data-has-card-for-ri-list="37173809" href="https://www.academia.edu/Documents/in/Human_Movement_Science"><div class="js-react-on-rails-component" style="display:none" data-component-name="Pill" data-props="{"color":"gray","children":["Human Movement Science"]}" data-trace="false" data-dom-id="Pill-react-component-5d41f4d3-41eb-4bf3-a6bd-9b080525e146"></div> <div id="Pill-react-component-5d41f4d3-41eb-4bf3-a6bd-9b080525e146"></div> </a></div></div></div></div><div class="right-panel-container"><div class="user-content-wrapper"><div class="uploads-container" id="social-redesign-work-container"><div class="upload-header"><h2 class="ds2-5-heading-sans-serif-xs">Uploads</h2></div><div class="documents-container backbone-social-profile-documents" style="width: 100%;"><div class="u-taCenter"></div><div class="profile--tab_content_container js-tab-pane tab-pane active" id="all"><div class="profile--tab_heading_container js-section-heading" data-section="Papers" id="Papers"><h3 class="profile--tab_heading_container">Papers by Haris Dindo</h3></div><div class="js-work-strip profile--work_container" data-work-id="8331686"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/8331686/Autonomous_Acquisition_of_Natural_Language"><img alt="Research paper thumbnail of Autonomous Acquisition of Natural Language" class="work-thumbnail" src="https://attachments.academia-assets.com/34736352/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/8331686/Autonomous_Acquisition_of_Natural_Language">Autonomous Acquisition of Natural Language</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson">Kristinn R. Thórisson</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://upm-es.academia.edu/ManuelRodriguez">Manuel Rodriguez</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://upm-es.academia.edu/RicardoSanz">Ricardo Sanz</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HelgiHelgason">Helgi Helgason</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://hi.academia.edu/GudbergJonsson">Gudberg K Jonsson</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">An important part of human intelligence is the ability to use language. Humans learn how to use l...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="cdc88d36f5cae01e068214444a2717ae" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":34736352,"asset_id":8331686,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/34736352/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="8331686"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="8331686"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 8331686; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=8331686]").text(description); $(".js-view-count[data-work-id=8331686]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 8331686; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='8331686']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "cdc88d36f5cae01e068214444a2717ae" } } $('.js-work-strip[data-work-id=8331686]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":8331686,"title":"Autonomous Acquisition of Natural Language","translated_title":"","metadata":{"abstract":"An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.","ai_title_tag":"Learning Language through Human Interaction"},"translated_abstract":"An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.","internal_url":"https://www.academia.edu/8331686/Autonomous_Acquisition_of_Natural_Language","translated_internal_url":"","created_at":"2014-09-15T01:00:02.979-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":7087,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":11997457,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":952004,"email":"d***o@csai.unipa.it","display_order":0,"name":"Haris Dindo","title":"Autonomous Acquisition of Natural Language"},{"id":11997458,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831181,"email":"d***e@istc.cnr.it","display_order":4194304,"name":"Dimitri Ognibene","title":"Autonomous Acquisition of Natural Language"},{"id":11997784,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":37173809,"co_author_invite_id":null,"email":"h***o@gmail.com","display_order":6291456,"name":"Haris Dindo","title":"Autonomous Acquisition of Natural Language"},{"id":11997785,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831180,"email":"d***e@imperial.ac.uk","display_order":7340032,"name":"Dimitri Ognibene","title":"Autonomous Acquisition of Natural Language"},{"id":12000188,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":5635344,"co_author_invite_id":null,"email":"m***r@gmail.com","affiliation":"Universidad Politécnica de Madrid","display_order":7864320,"name":"Manuel Rodriguez","title":"Autonomous Acquisition of Natural Language"},{"id":12000190,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831176,"email":"b***s@idsia.ch","display_order":8126464,"name":"Bas Steunebrink","title":"Autonomous Acquisition of Natural Language"},{"id":12000192,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1290101,"email":"g***o@istc.cnr.it","display_order":8257536,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000194,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":19635,"co_author_invite_id":null,"email":"g***o@istc.cnr.it","affiliation":"Consiglio Nazionale delle Ricerche (CNR)","display_order":8323072,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000195,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1813568,"email":"p***o@ip.rm.cnr.it","display_order":8355840,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000196,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":608288,"email":"g***o@cnr.it","display_order":8372224,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000197,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":323007,"email":"g***o@gmail.com","display_order":8380416,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000198,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831179,"email":"s***z@disam.upm.es","display_order":8384512,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000199,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831178,"email":"r***z@aslab.org","display_order":8386560,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000200,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":38811,"co_author_invite_id":null,"email":"R***z@upm.es","affiliation":"Universidad Politécnica de Madrid","display_order":8387584,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000201,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":34136706,"co_author_invite_id":null,"email":"r***z@gmail.com","display_order":8388096,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000202,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":867126,"email":"r***z@etsii.upm.es","display_order":8388352,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":19602886,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":41490850,"co_author_invite_id":null,"email":"h***i@perseptio.com","display_order":8388544,"name":"Helgi Helgason","title":"Autonomous Acquisition of Natural Language"},{"id":29744040,"work_id":8331686,"tagging_user_id":41490850,"tagged_user_id":44974171,"co_author_invite_id":null,"email":"g***n@hi.is","affiliation":"University of Iceland","display_order":8388576,"name":"Gudberg K Jonsson","title":"Autonomous Acquisition of Natural Language"},{"id":29744049,"work_id":8331686,"tagging_user_id":41490850,"tagged_user_id":null,"co_author_invite_id":1532486,"email":"m***z@ub.edu","display_order":8388592,"name":"M. Rodríguez","title":"Autonomous Acquisition of Natural Language"},{"id":32284586,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":6434089,"email":"c***z@upm.es","display_order":8388600,"name":"Carlos Hernández","title":"Autonomous Acquisition of Natural Language"}],"downloadable_attachments":[{"id":34736352,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/34736352/thumbnails/1.jpg","file_name":"AAoNL-wHeadr.pdf","download_url":"https://www.academia.edu/attachments/34736352/download_file","bulk_download_file_name":"Autonomous_Acquisition_of_Natural_Langua.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/34736352/AAoNL-wHeadr-libre.pdf?1410768094=\u0026response-content-disposition=attachment%3B+filename%3DAutonomous_Acquisition_of_Natural_Langua.pdf\u0026Expires=1738693944\u0026Signature=JuMm06gxQE1OMJV8Pu5dnLh7XFtXVSZ6opVGpEaMnGvuQZ2FOeLNhiHVL~P8IcBtRmKUktELQjvETUMD5daF9W17hSUfdDUuo5utFip4AtQ6O7XiZ8xszw7OLUk9jjNZoOcvBK0UEoCGwh3d~uPBvjAFCQmL1YsuUazGDbUEnlGYBuHx7xVJObs~hUd1eA9R5wkP88Maniu~~CzBB7KdOPFwzauWpFBtllpGARhdcSKcCE6btyObDyfgUvSPm549SQKYv-Ww~TSlT~eaH~ZUOHKeLmS5oITyXQ1hXxg7effMuejbMAxdttqBzxHd518ksmdrq052x9N76VhaGMdSRg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Autonomous_Acquisition_of_Natural_Language","translated_slug":"","page_count":10,"language":"en","content_type":"Work","summary":"An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.","owner":{"id":7087,"first_name":"Kristinn R.","middle_initials":null,"last_name":"Thórisson","page_name":"KrisitnnRThórisson","domain_name":"reykjavik","created_at":"2008-10-16T00:55:01.197-07:00","display_name":"Kristinn R. Thórisson","url":"https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson"},"attachments":[{"id":34736352,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/34736352/thumbnails/1.jpg","file_name":"AAoNL-wHeadr.pdf","download_url":"https://www.academia.edu/attachments/34736352/download_file","bulk_download_file_name":"Autonomous_Acquisition_of_Natural_Langua.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/34736352/AAoNL-wHeadr-libre.pdf?1410768094=\u0026response-content-disposition=attachment%3B+filename%3DAutonomous_Acquisition_of_Natural_Langua.pdf\u0026Expires=1738693944\u0026Signature=JuMm06gxQE1OMJV8Pu5dnLh7XFtXVSZ6opVGpEaMnGvuQZ2FOeLNhiHVL~P8IcBtRmKUktELQjvETUMD5daF9W17hSUfdDUuo5utFip4AtQ6O7XiZ8xszw7OLUk9jjNZoOcvBK0UEoCGwh3d~uPBvjAFCQmL1YsuUazGDbUEnlGYBuHx7xVJObs~hUd1eA9R5wkP88Maniu~~CzBB7KdOPFwzauWpFBtllpGARhdcSKcCE6btyObDyfgUvSPm549SQKYv-Ww~TSlT~eaH~ZUOHKeLmS5oITyXQ1hXxg7effMuejbMAxdttqBzxHd518ksmdrq052x9N76VhaGMdSRg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":77,"name":"Robotics","url":"https://www.academia.edu/Documents/in/Robotics"},{"id":78,"name":"Control Systems Engineering","url":"https://www.academia.edu/Documents/in/Control_Systems_Engineering"},{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":1432,"name":"Natural Language Processing","url":"https://www.academia.edu/Documents/in/Natural_Language_Processing"},{"id":11136,"name":"Cognitive Robotics","url":"https://www.academia.edu/Documents/in/Cognitive_Robotics"},{"id":12428,"name":"Automatic Control","url":"https://www.academia.edu/Documents/in/Automatic_Control"},{"id":14197,"name":"Autonomy","url":"https://www.academia.edu/Documents/in/Autonomy"},{"id":25271,"name":"Artificial General Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_General_Intelligence"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="20159379"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/20159379/Bounded_Recursive_Self_Improvement_RU_TR_13006"><img alt="Research paper thumbnail of Bounded.Recursive.Self.Improvement.RU.TR-13006" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/20159379/Bounded_Recursive_Self_Improvement_RU_TR_13006">Bounded.Recursive.Self.Improvement.RU.TR-13006</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson">Kristinn R. Thórisson</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/EricNivel">Eric Nivel</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://tudelft.academia.edu/CarlosHern%C3%A1ndezCorbato">Carlos Hernández Corbato</a></span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="20159379"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="20159379"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 20159379; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=20159379]").text(description); $(".js-view-count[data-work-id=20159379]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 20159379; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='20159379']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=20159379]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":20159379,"title":"Bounded.Recursive.Self.Improvement.RU.TR-13006","internal_url":"https://www.academia.edu/20159379/Bounded_Recursive_Self_Improvement_RU_TR_13006","owner_id":41269104,"coauthors_can_edit":true,"owner":{"id":41269104,"first_name":"Eric","middle_initials":null,"last_name":"Nivel","page_name":"EricNivel","domain_name":"independent","created_at":"2016-01-11T03:29:37.134-08:00","display_name":"Eric Nivel","url":"https://independent.academia.edu/EricNivel"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="20159382"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/20159382/Bounded_Seed_AGI"><img alt="Research paper thumbnail of Bounded Seed-AGI" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/20159382/Bounded_Seed_AGI">Bounded Seed-AGI</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/EricNivel">Eric Nivel</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson">Kristinn R. Thórisson</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Four principal features of autonomous control systems are left both unaddressed and unaddressable...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Four principal features of autonomous control systems are left both unaddressed and unaddressable by present-day engineering methodologies: (1) The ability to operate effectively in environments that are only partially known at design time; (2) A level of generality that allows a system to reassess and redefine the fulfillment of its mission in light of unexpected constraints or other unforeseen changes in the environment; (3) The ability to operate effectively in environments of significant complexity; and (4) The ability to degrade gracefully— how it can continue striving to achieve its main goals when resources become scarce, or in light of other expected or unexpected constraining factors that impede its progress. We describe new methodological and engineering principles for addressing these shortcomings, that we have used to design a machine that becomes increasingly better at behaving in underspecified circumstances, in a goal-directed way, on the job, by modeling itself and i...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="20159382"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="20159382"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 20159382; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=20159382]").text(description); $(".js-view-count[data-work-id=20159382]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 20159382; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='20159382']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=20159382]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":20159382,"title":"Bounded Seed-AGI","internal_url":"https://www.academia.edu/20159382/Bounded_Seed_AGI","owner_id":41269104,"coauthors_can_edit":true,"owner":{"id":41269104,"first_name":"Eric","middle_initials":null,"last_name":"Nivel","page_name":"EricNivel","domain_name":"independent","created_at":"2016-01-11T03:29:37.134-08:00","display_name":"Eric Nivel","url":"https://independent.academia.edu/EricNivel"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441134"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441134/Filling_in_Gaps_in_Textured_Images_Using_Bit_Plane_Statistics"><img alt="Research paper thumbnail of Filling-in Gaps in Textured Images Using Bit-Plane Statistics" class="work-thumbnail" src="https://attachments.academia-assets.com/39509771/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441134/Filling_in_Gaps_in_Textured_Images_Using_Bit_Plane_Statistics">Filling-in Gaps in Textured Images Using Bit-Plane Statistics</a></div><div class="wp-workCard_item"><span>Computer Vision Theory and Applications</span><span>, 2008</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In this paper we propose a novel approach for the texture analysis-synthesis problem, with the pu...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In this paper we propose a novel approach for the texture analysis-synthesis problem, with the purpose to restore missing zones in greyscale images. Bit-plane decomposition is used, and a dictionary is build with bit-blocks statistics for each plane. Gaps are reconstructed with a conditional stochastic process, to propagate texture global features into the damaged area, using information stored in the dictionary. Our restoration method is simple, easy and fast, with very good results for a large set of textured images. Results are compared with a state-of-the-art restoration algorithm.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="11b671494b9919947328f42b71293aae" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509771,"asset_id":17441134,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509771/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441134"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441134"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441134; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441134]").text(description); $(".js-view-count[data-work-id=17441134]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441134; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441134']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "11b671494b9919947328f42b71293aae" } } $('.js-work-strip[data-work-id=17441134]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441134,"title":"Filling-in Gaps in Textured Images Using Bit-Plane Statistics","internal_url":"https://www.academia.edu/17441134/Filling_in_Gaps_in_Textured_Images_Using_Bit_Plane_Statistics","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509771,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509771/thumbnails/1.jpg","file_name":"Filling-in_Gaps_in_Textured_Images_Using20151028-27290-zwcj0i.pdf","download_url":"https://www.academia.edu/attachments/39509771/download_file","bulk_download_file_name":"Filling_in_Gaps_in_Textured_Images_Using.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509771/Filling-in_Gaps_in_Textured_Images_Using20151028-27290-zwcj0i-libre.pdf?1446077458=\u0026response-content-disposition=attachment%3B+filename%3DFilling_in_Gaps_in_Textured_Images_Using.pdf\u0026Expires=1741087463\u0026Signature=hG2DmKsE1eLqV8UQBlPoTaqufnoDMhu1AElAFl84EI~d5yKYa576Zi6CLRchNss3-08tts2gAHl72dAmROvRZTJw2KdK2eCi9vzsdnd8qfjrzJclWn~hle5QrgbCzqqBFokJSUvfsv6ibToY4BMQkzWGecdyfgs4V7WUv-f7h4kr7vdaYOUn6XDI6AHUIjPqvHQ5Ubi8sxuKDzamO6wZyMLmMgql6Ybi0kb6VlaAQK328plRwHbqOe0W1Af4begL~mFC8SuCAq5BKbav8dFDT3MJTZRM9TYWvRoQTd8VPRm07NeuHXc9SDqbS0~A0zmT4AIO0BE-1yGAuHvlbDXtwA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="14525320"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/14525320/The_intentional_stance_as_structure_learning_a_computational_perspective_on_mindreading"><img alt="Research paper thumbnail of The intentional stance as structure learning: a computational perspective on mindreading" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/14525320/The_intentional_stance_as_structure_learning_a_computational_perspective_on_mindreading">The intentional stance as structure learning: a computational perspective on mindreading</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://cnr-it.academia.edu/FrancescoDonnarumma">Francesco Donnarumma</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Recent theories of mindreading explain the recognition of action, intention, and belief of other ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Recent theories of mindreading explain the recognition of action, intention, and belief of other agents in terms of generative architectures that model the causal relations between observables (e.g., observed movements) and their hidden causes (e.g., action goals and beliefs). Two kinds of probabilistic generative schemes have been proposed in cognitive science and robotics that link to a “theory theory” and “simulation theory” of mindreading, respectively. The former compares perceived actions to optimal plans derived from rationality principles and conceptual theories of others’ minds. The latter reuses one’s own internal (inverse and forward) models for action execution to perform a look-ahead mental simulation of perceived actions. Both theories, however, leave one question unanswered: how are the generative models – including task structure and parameters – learned in the first place? We start from Dennett’s “intentional stance” proposal and characterize it within generative theories of action and intention recognition. We propose that humans use an intentional stance as a learning bias that sidesteps the (hard) structure learning problem and bootstraps the acquisition of generative models for others’ actions. The intentional stance corresponds to a candidate structure in the generative scheme, which encodes a simplified belief-desire folk psychology and a hierarchical intention-to-action organization of behavior. This simple structure can be used as a proxy for the “true” generative structure of others’ actions and intentions and is continuously grown and refined – via state and parameter learning – during interactions. In turn – as our computational simulations show – this can help solve mindreading problems and bootstrap the acquisition of useful causal models of both one’s own and others’ goal-directed actions.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="14525320"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="14525320"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 14525320; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=14525320]").text(description); $(".js-view-count[data-work-id=14525320]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 14525320; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='14525320']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=14525320]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":14525320,"title":"The intentional stance as structure learning: a computational perspective on mindreading","internal_url":"https://www.academia.edu/14525320/The_intentional_stance_as_structure_learning_a_computational_perspective_on_mindreading","owner_id":1770564,"coauthors_can_edit":true,"owner":{"id":1770564,"first_name":"Francesco","middle_initials":null,"last_name":"Donnarumma","page_name":"FrancescoDonnarumma","domain_name":"cnr-it","created_at":"2012-05-17T23:38:07.273-07:00","display_name":"Francesco Donnarumma","url":"https://cnr-it.academia.edu/FrancescoDonnarumma"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441133"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441133/A_Cognitive_Framework_for_Imitation_Learning"><img alt="Research paper thumbnail of A Cognitive Framework for Imitation Learning" class="work-thumbnail" src="https://attachments.academia-assets.com/39509753/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441133/A_Cognitive_Framework_for_Imitation_Learning">A Cognitive Framework for Imitation Learning</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imitation, and not merely reproduc...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imitation, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of cognitive architecture for learning by imitation in which a rich conceptual representation of the observed</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6e63153fa0736694d7cbd23f6a58654f" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509753,"asset_id":17441133,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509753/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441133"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441133"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441133; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441133]").text(description); $(".js-view-count[data-work-id=17441133]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441133; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441133']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6e63153fa0736694d7cbd23f6a58654f" } } $('.js-work-strip[data-work-id=17441133]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441133,"title":"A Cognitive Framework for Imitation Learning","internal_url":"https://www.academia.edu/17441133/A_Cognitive_Framework_for_Imitation_Learning","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509753,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509753/thumbnails/1.jpg","file_name":"j.robot.2006.01.008.pdf20151028-2740-1fok3ko","download_url":"https://www.academia.edu/attachments/39509753/download_file","bulk_download_file_name":"A_Cognitive_Framework_for_Imitation_Lear.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509753/j.robot.2006.01.008-libre.pdf20151028-2740-1fok3ko?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DA_Cognitive_Framework_for_Imitation_Lear.pdf\u0026Expires=1741087463\u0026Signature=RMdZHLb9Oqs6PCI5GHzePz9c3f0q-7J7~rjAslHmH91~FABws6zU-PNwrpCWNQK4ACKCRgu-m09NMgvWAD3HSD~30KEh5Cwks~YfqG-UXv6oIFfycFhsJ4GB236XQMNpxZaJms~IhDD0RWzeOTh5-m7Ge~I0F4F0~igwGygwRw2TJu9BxAu8wneCYih-55VsYuU5Aty0kbdUVf3jXVAawufUQSSgNS5poRHtTv-9zLkPokXKiXF9phNlahzCM2tdjhPKIGTz5KBeSB-AlS3dkFOvHVF-poT8cIB1Zt19CbrID0VI6HqFRgdHTnmi~U5R5pbBllQMswR7qR697DeBug__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441132"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441132/A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGES_AFFECTED_BY_FOXING"><img alt="Research paper thumbnail of A SET OF LOW-LEVEL DESCRIPTORS FOR IMAGES AFFECTED BY FOXING" class="work-thumbnail" src="https://attachments.academia-assets.com/39509759/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441132/A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGES_AFFECTED_BY_FOXING">A SET OF LOW-LEVEL DESCRIPTORS FOR IMAGES AFFECTED BY FOXING</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Old printed photos are affected by several typical damages, due to age and bad preservation. &quo...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Old printed photos are affected by several typical damages, due to age and bad preservation. &quot;Foxing&quot; defects look like red-brownish spots onto the paper of the printed photo. Similar features can be seen in the digitized copies. In this paper we propose a set of low level descriptors to extract features from digitized photos affected by foxing. An image retrieval</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="99b08d18007771151084f98634563059" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509759,"asset_id":17441132,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509759/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441132"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441132"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441132; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441132]").text(description); $(".js-view-count[data-work-id=17441132]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441132; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441132']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "99b08d18007771151084f98634563059" } } $('.js-work-strip[data-work-id=17441132]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441132,"title":"A SET OF LOW-LEVEL DESCRIPTORS FOR IMAGES AFFECTED BY FOXING","internal_url":"https://www.academia.edu/17441132/A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGES_AFFECTED_BY_FOXING","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509759,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509759/thumbnails/1.jpg","file_name":"A_set_of_low-level_descriptors_for_image20151028-18389-1xtbeaq.pdf","download_url":"https://www.academia.edu/attachments/39509759/download_file","bulk_download_file_name":"A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGE.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509759/A_set_of_low-level_descriptors_for_image20151028-18389-1xtbeaq-libre.pdf?1446077458=\u0026response-content-disposition=attachment%3B+filename%3DA_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGE.pdf\u0026Expires=1741087463\u0026Signature=fnvsOsjrWVsg5odTweApNbFzcScTX07G1p8Dvkm-GGD78WJJZJaKS5HmgrDaHdVkMHs4MUwBaQaKUC25~pExbZnUOQGA86eELd0Jg27YvjO0dxQyyzctL~oJiux3mx56OKHxrrlZDGaYmrE-HqWB-WBbFeyySJptDtQucJR4Y2x60Zd-~e2vQh3spdw8zyImrXzVJsiqfyJ5mz6626nXGivMxD1sEKuxVBZeEbWurQTqdNQFyq3cfP5kUbg4~Vc6BBXEPZMVEcSzKjXEEMxHZVi8Xy--cdsvUrgeb2OfSOvM9i3-WibLvAzMYUBV~7w81HnAWjJ84eJbvMPDm5o0rg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441131"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441131/Real_Time_Visual_Grasp_Synthesis_Using_Genetic_Algorithms_and_Neural_Networks"><img alt="Research paper thumbnail of Real-Time Visual Grasp Synthesis Using Genetic Algorithms and Neural Networks" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441131/Real_Time_Visual_Grasp_Synthesis_Using_Genetic_Algorithms_and_Neural_Networks">Real-Time Visual Grasp Synthesis Using Genetic Algorithms and Neural Networks</a></div><div class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2007</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">This paper addresses the problem of automatic grasp synthesis of unknown planar objects. In other...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">This paper addresses the problem of automatic grasp synthesis of unknown planar objects. In other words, we must compute points on the object&amp;amp;amp;amp;amp;amp;amp;#x27;s boundary to be reached by the robotic fingers such that the resulting grasp, among infinite possibilities, optimizes some given criteria. Objects to be grasped are represented as superellipses, a family of deformable 2D parametric functions. They can model a large variety of shapes occurring often in practice by changing a small number of parameters. The space of possible grasp ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441131"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441131"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441131; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441131]").text(description); $(".js-view-count[data-work-id=17441131]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441131; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441131']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441131]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441131,"title":"Real-Time Visual Grasp Synthesis Using Genetic Algorithms and Neural Networks","internal_url":"https://www.academia.edu/17441131/Real_Time_Visual_Grasp_Synthesis_Using_Genetic_Algorithms_and_Neural_Networks","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441130"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441130/An_architecture_for_observational_learning_and_decision_making_based_on_internal_models"><img alt="Research paper thumbnail of An architecture for observational learning and decision making based on internal models" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441130/An_architecture_for_observational_learning_and_decision_making_based_on_internal_models">An architecture for observational learning and decision making based on internal models</a></div><div class="wp-workCard_item"><span>Biologically Inspired Cognitive Architectures</span><span>, 2013</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">ABSTRACT We present a cognitive architecture whose main constituents are allowed to grow through ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">ABSTRACT We present a cognitive architecture whose main constituents are allowed to grow through a situated experience in the world. Such an architectural growth is bootstrapped from a minimal initial knowledge and the architecture itself is built around the biologically-inspired notion of internal models. The key idea, supported by findings in cognitive neuroscience, is that the same internal models used in overt goal-directed action execution can be covertly re-enacted in simulation to provide a unifying explanation to a number of apparently unrelated individual and social phenomena, such as state estimation, action and intention understanding, imitation learning and mindreading. Thus, rather than reasoning over abstract symbols, we rely on the biologically plausible processes firmly grounded in the actual sensorimotor experience of the agent. The article describes how such internal models are learned in the first place, either through individual experience or by observing and imitating other skilled agents, and how they are used in action planning and execution. Furthermore, we explain how the architecture continuously adapts its internal agency and how increasingly complex cognitive phenomena, such as continuous learning, prediction and anticipation, result from an interplay of simpler principles. We describe an early evaluation of our approach in a classical AI problem-solving domain: the Sokoban puzzle.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441130"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441130"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441130; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441130]").text(description); $(".js-view-count[data-work-id=17441130]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441130; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441130']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441130]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441130,"title":"An architecture for observational learning and decision making based on internal models","internal_url":"https://www.academia.edu/17441130/An_architecture_for_observational_learning_and_decision_making_based_on_internal_models","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441129"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441129/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces"><img alt="Research paper thumbnail of Anchoring by Imitation Learning in Conceptual Spaces" class="work-thumbnail" src="https://attachments.academia-assets.com/39509778/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441129/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces">Anchoring by Imitation Learning in Conceptual Spaces</a></div><div class="wp-workCard_item"><span>Congress of the Italian Association for Artificial Intelligence</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imita- tion, and not merely reprod...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imita- tion, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of a cognitive architecture for learning by imitation in which a rich conceptual representation of</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="cd7cf2ebc5b3c286f411c5e1fd88863e" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509778,"asset_id":17441129,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509778/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441129"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441129"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441129; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441129]").text(description); $(".js-view-count[data-work-id=17441129]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441129; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441129']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "cd7cf2ebc5b3c286f411c5e1fd88863e" } } $('.js-work-strip[data-work-id=17441129]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441129,"title":"Anchoring by Imitation Learning in Conceptual Spaces","internal_url":"https://www.academia.edu/17441129/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509778,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509778/thumbnails/1.jpg","file_name":"Anchoring_by_Imitation_Learning_in_Conce20151028-27290-1d5ckys.pdf","download_url":"https://www.academia.edu/attachments/39509778/download_file","bulk_download_file_name":"Anchoring_by_Imitation_Learning_in_Conce.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509778/Anchoring_by_Imitation_Learning_in_Conce20151028-27290-1d5ckys-libre.pdf?1446077458=\u0026response-content-disposition=attachment%3B+filename%3DAnchoring_by_Imitation_Learning_in_Conce.pdf\u0026Expires=1741087463\u0026Signature=Psn2wMP4IUhhUiMpLkqRAsW28BQyXRKQ0VuGQug7mudkW4O6cftWx8zMM2L3po7eSBrRWRGso0P1AbwHrcsDnAxo0Hepa8YFDJfb1BeDLXDp9hSRxVItLg8~DfV5CqvIR9ryJfMiZnWEscW4x3yzhPf1aMdIY~AHRorEAAMkRuhRbgSmei5LO~oghAg1rbXuOGQbks8ZKwyKQWa1ZwSehPuQ1KkBAZu9V8XzwlNWmu0UJlHRg2HYbxqZ7TOTzwiHElf5KEvLDH8ISUfHL3G1j-i4foybjRiJu2EdnuL6CYUSoiLX7AA6nPHljFO9meFl74On6Dlp4wFYzyLUJrhRCQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441128"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441128/An_adaptive_probabilistic_graphical_model_for_representing_skills_in_PbD_settings"><img alt="Research paper thumbnail of An adaptive probabilistic graphical model for representing skills in PbD settings" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441128/An_adaptive_probabilistic_graphical_model_for_representing_skills_in_PbD_settings">An adaptive probabilistic graphical model for representing skills in PbD settings</a></div><div class="wp-workCard_item"><span>2010 5th ACM/IEEE International Conference on Human-Robot Interaction (HRI)</span><span>, 2010</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">AbstractUnderstanding and efficiently representing skills is one of the most important problems ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">AbstractUnderstanding and efficiently representing skills is one of the most important problems in a general Programming by Demonstration (PbD) paradigm. We present Growing Hierarchi-cal Dynamic Bayesian Networks (GHDBN), an adaptive variant of the general ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441128"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441128"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441128; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441128]").text(description); $(".js-view-count[data-work-id=17441128]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441128; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441128']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441128]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441128,"title":"An adaptive probabilistic graphical model for representing skills in PbD settings","internal_url":"https://www.academia.edu/17441128/An_adaptive_probabilistic_graphical_model_for_representing_skills_in_PbD_settings","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441127"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441127/Learning_high_level_tasks_through_imitation"><img alt="Research paper thumbnail of Learning high-level tasks through imitation" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441127/Learning_high_level_tasks_through_imitation">Learning high-level tasks through imitation</a></div><div class="wp-workCard_item"><span>International Conference on Intelligent RObots and Systems - IROS</span><span>, 2006</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">This paper presents the cognitive architecture Con-SCIS (conceptual space based cognitive imitati...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">This paper presents the cognitive architecture Con-SCIS (conceptual space based cognitive imitation system), which tightly links low-level data processing with knowledge representation in the context of imitation learning. We use the word imitate to refer to the paradigm of program-level imitation: we are interested in the final effects of actions on objects, and not on the particular kinematic or dynamic</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441127"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441127"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441127; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441127]").text(description); $(".js-view-count[data-work-id=17441127]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441127; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441127']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441127]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441127,"title":"Learning high-level tasks through imitation","internal_url":"https://www.academia.edu/17441127/Learning_high_level_tasks_through_imitation","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441126"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441126/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces"><img alt="Research paper thumbnail of Anchoring by Imitation Learning in Conceptual Spaces" class="work-thumbnail" src="https://attachments.academia-assets.com/39509761/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441126/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces">Anchoring by Imitation Learning in Conceptual Spaces</a></div><div class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imitation, and not merely reproduc...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imitation, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of a cognitive architecture for learning by imitation in which a rich conceptual representation of the observed actions is built. The purpose of the following discussion is to show how the same conceptual representation can be used both in a bottom-up approach, in order to learn sequences of actions by imitation learning paradigm, and in a topdown approach, in order to anchor the symbolical representations to the perceptual activities of the robotic system. The proposed architecture has been tested on the robotic system composed of a PUMA 200 industrial manipulator and an anthropomorphic robotic hand. The system demonstrated the ability to learn and imitate a set of movement primitives acquired through the vision system for simple manipulative purposes.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="5b8e391db144bfe709f80506b4d6bfa0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509761,"asset_id":17441126,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509761/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441126"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441126"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441126; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441126]").text(description); $(".js-view-count[data-work-id=17441126]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441126; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441126']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "5b8e391db144bfe709f80506b4d6bfa0" } } $('.js-work-strip[data-work-id=17441126]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441126,"title":"Anchoring by Imitation Learning in Conceptual Spaces","internal_url":"https://www.academia.edu/17441126/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509761,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509761/thumbnails/1.jpg","file_name":"Anchoring_by_Imitation_Learning_in_Conce20151028-16072-6vu3zb.pdf","download_url":"https://www.academia.edu/attachments/39509761/download_file","bulk_download_file_name":"Anchoring_by_Imitation_Learning_in_Conce.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509761/Anchoring_by_Imitation_Learning_in_Conce20151028-16072-6vu3zb-libre.pdf?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DAnchoring_by_Imitation_Learning_in_Conce.pdf\u0026Expires=1741087463\u0026Signature=IIfNXlb5mA2thfuDfQ1VeuwSlCBvveIogmpdf07Z7SQVQaLt1Gv1mqupM~j6viHTQueh6iqe9vncV2DVIABbQaOQryqtdoJSEC2NqBnDFET6skyHDNsxivpRH24rSmQyCHozKYPUqMA42Iqx~Rs7pPMc4N8F8gRc8t-Hs28~EEoYnITrUq2MrTrivhHCPg-I4PbjohBkrfNKpkt7-9wNuJTEKMmOuB9mxR9JtKBthIYLUJ20Kdg1kOkcy7F~aKSmIB6FvfLWBnhq~6NmLkAiByvVobWGg7oVuwy-Zmx9bXBORLCBwmSx1keTM6gncvThDUaVRmKeUUFWyP7e~tut7A__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441125"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441125/A_cognitive_approach_to_goal_level_imitation"><img alt="Research paper thumbnail of A cognitive approach to goal-level imitation" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441125/A_cognitive_approach_to_goal_level_imitation">A cognitive approach to goal-level imitation</a></div><div class="wp-workCard_item"><span>Interaction Studies</span><span>, 2008</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">... 306 Antonio Chella, Haris Dindo and Ignazio Infantino of each detected object. Objects are mo...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">... 306 Antonio Chella, Haris Dindo and Ignazio Infantino of each detected object. Objects are modelled with superellipses, a natural generali-sation of ellipses. They can model a large variety of regular shapes by changing a small number of parameters (Jaklic, Leonardis, &amp;amp;amp;amp; ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441125"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441125"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441125; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441125]").text(description); $(".js-view-count[data-work-id=17441125]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441125; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441125']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441125]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441125,"title":"A cognitive approach to goal-level imitation","internal_url":"https://www.academia.edu/17441125/A_cognitive_approach_to_goal_level_imitation","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441124"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441124/People_Tracking_and_Posture_Recognition_for_Human_Robot_Interaction"><img alt="Research paper thumbnail of People Tracking and Posture Recognition for Human-Robot Interaction" class="work-thumbnail" src="https://attachments.academia-assets.com/39509726/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441124/People_Tracking_and_Posture_Recognition_for_Human_Robot_Interaction">People Tracking and Posture Recognition for Human-Robot Interaction</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The paper deals with a system for simultaneous people tracking and posture recognition in clutter...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The paper deals with a system for simultaneous people tracking and posture recognition in cluttered environments in the context of human-robot interaction. We adopt no particular assumptions on the movement of a person nor on its appearance, making the system suitable to several real-world applications. The system can be roughly subdivided into two highly correlated phases: tracking and recognition. The</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="c15573169ce77e0e5db28a28bb6788f2" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509726,"asset_id":17441124,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509726/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441124"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441124"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441124; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441124]").text(description); $(".js-view-count[data-work-id=17441124]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441124; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441124']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "c15573169ce77e0e5db28a28bb6788f2" } } $('.js-work-strip[data-work-id=17441124]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441124,"title":"People Tracking and Posture Recognition for Human-Robot Interaction","internal_url":"https://www.academia.edu/17441124/People_Tracking_and_Posture_Recognition_for_Human_Robot_Interaction","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509726,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509726/thumbnails/1.jpg","file_name":"HRI06_3.pdf","download_url":"https://www.academia.edu/attachments/39509726/download_file","bulk_download_file_name":"People_Tracking_and_Posture_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509726/HRI06_3-libre.pdf?1446077463=\u0026response-content-disposition=attachment%3B+filename%3DPeople_Tracking_and_Posture_Recognition.pdf\u0026Expires=1741087463\u0026Signature=WXClnpoe0CXMr52565cqMTDHXqI~Z4kwxpWCHZMEg1cG7D2vFU2Uh9CMn-iveXqGv3~bxQh1zVUsSveSNW2B5BXHlG5NJHbgyPbz3uQ5GS9sOM6GHcSUERMr9hxAbAsMzSpxeEvLvr4dJ95GCvt06GmUZhqORCZoNoQjExfUgt2oXa4iLGwaN-Mk1dQlvToz~FlciJR9-1Ai6GzmjuW4PpcWz~4Lu6BJKbKjlcd6z0m4qF5C7GO0Jrq453aPkHclc9QPa6O~vQuQNiigosmN57BC--JfmSTC3ch0SPV23v4KEsQFWKamDdXcV1Ze9RRl9e6GfqplXWTDbLdCFL4kkg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":39509727,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509727/thumbnails/1.jpg","file_name":"HRI06_3.pdf","download_url":"https://www.academia.edu/attachments/39509727/download_file","bulk_download_file_name":"People_Tracking_and_Posture_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509727/HRI06_3-libre.pdf?1446077463=\u0026response-content-disposition=attachment%3B+filename%3DPeople_Tracking_and_Posture_Recognition.pdf\u0026Expires=1741087463\u0026Signature=PryQ~8Y2frEF6QdcudCylZvVhcka1-DISUHhkmPnR4ETLEGvb-RzAh6VY-kwXmcXLjbaLagkgglEhLdxFjxNgPeQ2Dgo0SuKX~jeCi5~nXBJGgfonvITve1B9jMdel9AHLkwk88AdxljIBBv7K-OaGfLFqpHtj5Bty6wEwE7HFxCLsZqMEiPIZvXT0Dm0-vn0NHJCxRE97D9j2qayYg3DLRsfcJUFme0NxJcw57FKJpNJlVU~8GMTGMfasv3-3EqsMG5AUiW~EYV4GxzuPYGU95TPyX4-qWBn5RO0r5vOie95U8tybZTuPoOIgGXpht3Z4YrNNhZJMmvW-eVnps9rA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441123"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441123/A_Cognitive_Framework_for_Learning_by_Imitation"><img alt="Research paper thumbnail of A Cognitive Framework for Learning by Imitation" class="work-thumbnail" src="https://attachments.academia-assets.com/39509766/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441123/A_Cognitive_Framework_for_Learning_by_Imitation">A Cognitive Framework for Learning by Imitation</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imitation, and not merely reproduc...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imitation, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of a cognitive framework for learning by imitation in which a rich conceptual representation of the</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6fc31d6538ebc10f7dc5c7d7eea550fe" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509766,"asset_id":17441123,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509766/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441123"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441123"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441123; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441123]").text(description); $(".js-view-count[data-work-id=17441123]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441123; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441123']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6fc31d6538ebc10f7dc5c7d7eea550fe" } } $('.js-work-strip[data-work-id=17441123]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441123,"title":"A Cognitive Framework for Learning by Imitation","internal_url":"https://www.academia.edu/17441123/A_Cognitive_Framework_for_Learning_by_Imitation","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509766,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509766/thumbnails/1.jpg","file_name":"A_Cognitive_Framework_for_Learning_by_Im20151028-27290-1emzm9y.pdf","download_url":"https://www.academia.edu/attachments/39509766/download_file","bulk_download_file_name":"A_Cognitive_Framework_for_Learning_by_Im.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509766/A_Cognitive_Framework_for_Learning_by_Im20151028-27290-1emzm9y-libre.pdf?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DA_Cognitive_Framework_for_Learning_by_Im.pdf\u0026Expires=1741087463\u0026Signature=ISCYAHtXa0NmZEyzfKfi0WtifGdfo9Tj9qnrr7fg9Ha0E-FzGmvCqRd7X01LrHRcWEcXaYXC5dCxdFJ9VYqCpL7Wq22kRg4fwaLjrpVW9q8X2X2y~pBJynDbR1wTXQl9-r5bQhNm~y~8Nj9kxeQYFlpKC6I7MsRrlYY7-m~3CIdo~xTgPoq7noWoskybFEcjvZV9sdegfCBL9OUDMRBm4Ls6HkVY7aP6OL7TizpYM4HuPS-FjlCGD6eGOUrEyqCSdXrEUp~pKdJuqOZUmlAUn6T7QQY2gKV~HaCdDu~OvE7f5Irs23weAV9-QFUvzYS0nZZ330hAqAqQOllutxOKVw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441122"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441122/A_System_for_Simultaneous_People_Tracking_and_Posture_Recognition_in_the_context_of_Human_Computer_Interaction"><img alt="Research paper thumbnail of A System for Simultaneous People Tracking and Posture Recognition in the context of Human-Computer Interaction" class="work-thumbnail" src="https://attachments.academia-assets.com/42259707/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441122/A_System_for_Simultaneous_People_Tracking_and_Posture_Recognition_in_the_context_of_Human_Computer_Interaction">A System for Simultaneous People Tracking and Posture Recognition in the context of Human-Computer Interaction</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The paper deals with an artificial-vision based system for simultaneous people tracking and postu...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The paper deals with an artificial-vision based system for simultaneous people tracking and posture recognition in the context of human-computer interaction. We adopt no particular assumptions on the movement of a person and on its appearance, making the system suitable to several real-world applications. The system can be roughly subdivided into two highly correlated phases: tracking and recognition. The tracking</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="81d0ba063ef4e6cbb1081661c3a5bfe7" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":42259707,"asset_id":17441122,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/42259707/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441122"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441122"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441122; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441122]").text(description); $(".js-view-count[data-work-id=17441122]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441122; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441122']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "81d0ba063ef4e6cbb1081661c3a5bfe7" } } $('.js-work-strip[data-work-id=17441122]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441122,"title":"A System for Simultaneous People Tracking and Posture Recognition in the context of Human-Computer Interaction","internal_url":"https://www.academia.edu/17441122/A_System_for_Simultaneous_People_Tracking_and_Posture_Recognition_in_the_context_of_Human_Computer_Interaction","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":42259707,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/42259707/thumbnails/1.jpg","file_name":"EURCON.2005.1630114.pdf20160206-25462-m65j31","download_url":"https://www.academia.edu/attachments/42259707/download_file","bulk_download_file_name":"A_System_for_Simultaneous_People_Trackin.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/42259707/EURCON.2005.1630114-libre.pdf20160206-25462-m65j31?1454824468=\u0026response-content-disposition=attachment%3B+filename%3DA_System_for_Simultaneous_People_Trackin.pdf\u0026Expires=1741087464\u0026Signature=VDg-PwEZzE2WAupQ0GfAPNeouXawsmhTD7veYxt8DxiMJLiW~xrQ-7LMDeHGihyUSGbYJ2LYnf5lSYvg7terNIfbT5W-YKVv5cWh~UjD3H8q1iz3QLGX1evTa3LmlIV7YVtaXsvZFrWLGjmCB9fMFW9LPfRrRQUed3hCdhZMB20m25ubn6lWtfA2uVWx8mDj2uuy5W4te-KzmZ0WRkmdvkRa-ACCR05IAzDJwLSxyu94oAVLEclV93cvL5GxxAQ~mIo4g8TvzqE7Ou1dqQtQhAAxu6qzzJPRSYLsoMXGuwQJH4dX6tVnqh~IYnkNOFcsu50xejXYh3rjqJ0DvZgZaQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441121"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441121/A_cognitive_architecture_for_Robotic_hand_posture_learning"><img alt="Research paper thumbnail of A cognitive architecture for Robotic hand posture learning" class="work-thumbnail" src="https://attachments.academia-assets.com/39509757/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441121/A_cognitive_architecture_for_Robotic_hand_posture_learning">A cognitive architecture for Robotic hand posture learning</a></div><div class="wp-workCard_item"><span>IEEE Transactions on Systems, Man, and Cybernetics</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The paper deals with the design and implementation of a visual control of a robotic system compos...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The paper deals with the design and implementation of a visual control of a robotic system composed of a dexterous hand and a video camera. The aim of the proposed system is to reproduce the movements of a human hand in order to learn complex manipulation tasks or to interact with the user. A novel algorithm for a robust and</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="9c0d7605df4a493c8b50566374c162d7" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509757,"asset_id":17441121,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509757/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441121"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441121"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441121; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441121]").text(description); $(".js-view-count[data-work-id=17441121]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441121; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441121']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "9c0d7605df4a493c8b50566374c162d7" } } $('.js-work-strip[data-work-id=17441121]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441121,"title":"A cognitive architecture for Robotic hand posture learning","internal_url":"https://www.academia.edu/17441121/A_cognitive_architecture_for_Robotic_hand_posture_learning","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509757,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509757/thumbnails/1.jpg","file_name":"A_cognitive_architecture_for_Robotic_han20151028-7871-vvafqb.pdf","download_url":"https://www.academia.edu/attachments/39509757/download_file","bulk_download_file_name":"A_cognitive_architecture_for_Robotic_han.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509757/A_cognitive_architecture_for_Robotic_han20151028-7871-vvafqb-libre.pdf?1446077463=\u0026response-content-disposition=attachment%3B+filename%3DA_cognitive_architecture_for_Robotic_han.pdf\u0026Expires=1741087464\u0026Signature=RnhKYcnjNYf37azFIbbFZoMjg4d7WfB7gKKCDN45pmiRloNb6mVLSXr4h1GfwOq-khkDcd6oisJKXybzTlPe36WZAyJkhs59PtyidkkLIA2aTe2Bn6~cMaId42-raBVt8-xT8i2d7hyKlBjZyVC82iHpBfq8aUk0EHrLeo2Ob46B-S6byDccMhK26haHgZxlCBQeUtxy5o3UbDiqt-zwEPWEIuCFCoqZ7cpVX5C6ZhT~sOF~shGmGXQVZg2gRBC7KF~9~8j9qLTn32EhooSMYwKJ6bj8turBU8h0tCTM5u8fwHu7-F2JR0uq0PfYF5zG-IjILZBpUvCdVuYWG78rdg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441120"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441120/Discriminating_and_simulating_actions_with_the_associative_self_organising_map"><img alt="Research paper thumbnail of Discriminating and simulating actions with the associative self-organising map" class="work-thumbnail" src="https://attachments.academia-assets.com/39509776/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441120/Discriminating_and_simulating_actions_with_the_associative_self_organising_map">Discriminating and simulating actions with the associative self-organising map</a></div><div class="wp-workCard_item"><span>Connection Science</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We propose a system able to represent others' actions as well as to internally simulate their lik...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We propose a system able to represent others' actions as well as to internally simulate their likely continuation from a partial observation. The approach presented here is the first step towards a more ambitious goal of endowing an artificial agent with the ability to recognise and predict others' intentions. Our approach is based on the associative self-organising map, a variant of the self-organising map capable of learning to associate its activity with different inputs over time, where inputs are processed observations of others' actions. We have evaluated our system in two different experimental scenarios obtaining promising results: the system demonstrated an ability to learn discriminable representations of actions, to recognise novel input, and to simulate the likely continuation of partially seen actions.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a7aaf2fadef48f492219af07e51ffaaa" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509776,"asset_id":17441120,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509776/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441120"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441120"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441120; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441120]").text(description); $(".js-view-count[data-work-id=17441120]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441120; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441120']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "a7aaf2fadef48f492219af07e51ffaaa" } } $('.js-work-strip[data-work-id=17441120]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441120,"title":"Discriminating and simulating actions with the associative self-organising map","internal_url":"https://www.academia.edu/17441120/Discriminating_and_simulating_actions_with_the_associative_self_organising_map","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509776,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509776/thumbnails/1.jpg","file_name":"Discriminating_and_simulating_actions_wi20151028-16066-1frmq5q.pdf","download_url":"https://www.academia.edu/attachments/39509776/download_file","bulk_download_file_name":"Discriminating_and_simulating_actions_wi.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509776/Discriminating_and_simulating_actions_wi20151028-16066-1frmq5q-libre.pdf?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DDiscriminating_and_simulating_actions_wi.pdf\u0026Expires=1741087464\u0026Signature=VAbwVMqxyLRuj3G-lrYCe1cU8HGrzDOWBz9BdPhqzYJm~-YHszX0nAVUELO-XGE1X4FmYnIvnFnP-N4ah5EQNCz~9yn0nvkvYKpOES6EaUDNhWfuVz7q2oUJiaebz46cfV4F-bzNWzUAb2srD-aESietiTN9ehhHYIWvDH8MeBZce-irLwJDmhuo8MJ7q2wOpxnllco672U9S~ewvNPlxIwZ4Sm2KtvxVuQk5fSqG-ShGzs4ZBWRjI6vie81Vwdu8M9nY09rNKrnTuYTGFeHiMZuCzYLqx6PQFVV9tdR8DRm-zwv~8NIszPi2rKCutRo-ZmZSfl2DhntYyKMgU9ThQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441119"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441119/Bounded_Recursive_Self_Improvement_RU_TR_13006"><img alt="Research paper thumbnail of Bounded.Recursive.Self.Improvement.RU.TR-13006" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441119/Bounded_Recursive_Self_Improvement_RU_TR_13006">Bounded.Recursive.Self.Improvement.RU.TR-13006</a></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441119"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441119"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441119; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441119]").text(description); $(".js-view-count[data-work-id=17441119]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441119; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441119']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441119]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441119,"title":"Bounded.Recursive.Self.Improvement.RU.TR-13006","internal_url":"https://www.academia.edu/17441119/Bounded_Recursive_Self_Improvement_RU_TR_13006","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div><div class="profile--tab_content_container js-tab-pane tab-pane" data-section-id="3871645" id="papers"><div class="js-work-strip profile--work_container" data-work-id="8331686"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/8331686/Autonomous_Acquisition_of_Natural_Language"><img alt="Research paper thumbnail of Autonomous Acquisition of Natural Language" class="work-thumbnail" src="https://attachments.academia-assets.com/34736352/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/8331686/Autonomous_Acquisition_of_Natural_Language">Autonomous Acquisition of Natural Language</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson">Kristinn R. Thórisson</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://upm-es.academia.edu/ManuelRodriguez">Manuel Rodriguez</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://upm-es.academia.edu/RicardoSanz">Ricardo Sanz</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HelgiHelgason">Helgi Helgason</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://hi.academia.edu/GudbergJonsson">Gudberg K Jonsson</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">An important part of human intelligence is the ability to use language. Humans learn how to use l...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="cdc88d36f5cae01e068214444a2717ae" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":34736352,"asset_id":8331686,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/34736352/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="8331686"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="8331686"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 8331686; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=8331686]").text(description); $(".js-view-count[data-work-id=8331686]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 8331686; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='8331686']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "cdc88d36f5cae01e068214444a2717ae" } } $('.js-work-strip[data-work-id=8331686]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":8331686,"title":"Autonomous Acquisition of Natural Language","translated_title":"","metadata":{"abstract":"An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.","ai_title_tag":"Learning Language through Human Interaction"},"translated_abstract":"An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.","internal_url":"https://www.academia.edu/8331686/Autonomous_Acquisition_of_Natural_Language","translated_internal_url":"","created_at":"2014-09-15T01:00:02.979-07:00","preview_url":null,"current_user_can_edit":null,"current_user_is_owner":null,"owner_id":7087,"coauthors_can_edit":true,"document_type":"paper","co_author_tags":[{"id":11997457,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":952004,"email":"d***o@csai.unipa.it","display_order":0,"name":"Haris Dindo","title":"Autonomous Acquisition of Natural Language"},{"id":11997458,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831181,"email":"d***e@istc.cnr.it","display_order":4194304,"name":"Dimitri Ognibene","title":"Autonomous Acquisition of Natural Language"},{"id":11997784,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":37173809,"co_author_invite_id":null,"email":"h***o@gmail.com","display_order":6291456,"name":"Haris Dindo","title":"Autonomous Acquisition of Natural Language"},{"id":11997785,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831180,"email":"d***e@imperial.ac.uk","display_order":7340032,"name":"Dimitri Ognibene","title":"Autonomous Acquisition of Natural Language"},{"id":12000188,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":5635344,"co_author_invite_id":null,"email":"m***r@gmail.com","affiliation":"Universidad Politécnica de Madrid","display_order":7864320,"name":"Manuel Rodriguez","title":"Autonomous Acquisition of Natural Language"},{"id":12000190,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831176,"email":"b***s@idsia.ch","display_order":8126464,"name":"Bas Steunebrink","title":"Autonomous Acquisition of Natural Language"},{"id":12000192,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1290101,"email":"g***o@istc.cnr.it","display_order":8257536,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000194,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":19635,"co_author_invite_id":null,"email":"g***o@istc.cnr.it","affiliation":"Consiglio Nazionale delle Ricerche (CNR)","display_order":8323072,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000195,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1813568,"email":"p***o@ip.rm.cnr.it","display_order":8355840,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000196,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":608288,"email":"g***o@cnr.it","display_order":8372224,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000197,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":323007,"email":"g***o@gmail.com","display_order":8380416,"name":"Giovanni Pezzulo","title":"Autonomous Acquisition of Natural Language"},{"id":12000198,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831179,"email":"s***z@disam.upm.es","display_order":8384512,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000199,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":1831178,"email":"r***z@aslab.org","display_order":8386560,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000200,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":38811,"co_author_invite_id":null,"email":"R***z@upm.es","affiliation":"Universidad Politécnica de Madrid","display_order":8387584,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000201,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":34136706,"co_author_invite_id":null,"email":"r***z@gmail.com","display_order":8388096,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":12000202,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":867126,"email":"r***z@etsii.upm.es","display_order":8388352,"name":"Ricardo Sanz","title":"Autonomous Acquisition of Natural Language"},{"id":19602886,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":41490850,"co_author_invite_id":null,"email":"h***i@perseptio.com","display_order":8388544,"name":"Helgi Helgason","title":"Autonomous Acquisition of Natural Language"},{"id":29744040,"work_id":8331686,"tagging_user_id":41490850,"tagged_user_id":44974171,"co_author_invite_id":null,"email":"g***n@hi.is","affiliation":"University of Iceland","display_order":8388576,"name":"Gudberg K Jonsson","title":"Autonomous Acquisition of Natural Language"},{"id":29744049,"work_id":8331686,"tagging_user_id":41490850,"tagged_user_id":null,"co_author_invite_id":1532486,"email":"m***z@ub.edu","display_order":8388592,"name":"M. Rodríguez","title":"Autonomous Acquisition of Natural Language"},{"id":32284586,"work_id":8331686,"tagging_user_id":7087,"tagged_user_id":null,"co_author_invite_id":6434089,"email":"c***z@upm.es","display_order":8388600,"name":"Carlos Hernández","title":"Autonomous Acquisition of Natural Language"}],"downloadable_attachments":[{"id":34736352,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/34736352/thumbnails/1.jpg","file_name":"AAoNL-wHeadr.pdf","download_url":"https://www.academia.edu/attachments/34736352/download_file","bulk_download_file_name":"Autonomous_Acquisition_of_Natural_Langua.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/34736352/AAoNL-wHeadr-libre.pdf?1410768094=\u0026response-content-disposition=attachment%3B+filename%3DAutonomous_Acquisition_of_Natural_Langua.pdf\u0026Expires=1738693944\u0026Signature=JuMm06gxQE1OMJV8Pu5dnLh7XFtXVSZ6opVGpEaMnGvuQZ2FOeLNhiHVL~P8IcBtRmKUktELQjvETUMD5daF9W17hSUfdDUuo5utFip4AtQ6O7XiZ8xszw7OLUk9jjNZoOcvBK0UEoCGwh3d~uPBvjAFCQmL1YsuUazGDbUEnlGYBuHx7xVJObs~hUd1eA9R5wkP88Maniu~~CzBB7KdOPFwzauWpFBtllpGARhdcSKcCE6btyObDyfgUvSPm549SQKYv-Ww~TSlT~eaH~ZUOHKeLmS5oITyXQ1hXxg7effMuejbMAxdttqBzxHd518ksmdrq052x9N76VhaGMdSRg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"slug":"Autonomous_Acquisition_of_Natural_Language","translated_slug":"","page_count":10,"language":"en","content_type":"Work","summary":"An important part of human intelligence is the ability to use language. Humans learn how to use language in a society of language users, which is probably the most effective way to learn a language from the ground up. Principles that might allow an artificial agents to learn language this way are not known at present. Here we present a framework which begins to address this challenge. Our auto-catalytic, endogenous, reflective architecture (AERA) supports the creation of agents that can learn natural language by observation. We present results from two experiments where our S1 agent learns human communication by observing two humans interacting in a realtime mock television interview, using gesture and situated language. Results show that S1 can learn multimodal complex language and multimodal communicative acts, using a vocabulary of 100 words with numerous sentence formats, by observing unscripted interaction between the humans, with no grammar being provided to it a priori, and only high-level information about the format of the human interaction in the form of high-level goals of the interviewer and interviewee and a small ontology. The agent learns both the pragmatics, semantics, and syntax of complex sentences spoken by the human subjects on the topic of recycling of objects such as aluminum cans, glass bottles, plastic, and wood, as well as use of manual deictic reference and anaphora.","owner":{"id":7087,"first_name":"Kristinn R.","middle_initials":null,"last_name":"Thórisson","page_name":"KrisitnnRThórisson","domain_name":"reykjavik","created_at":"2008-10-16T00:55:01.197-07:00","display_name":"Kristinn R. Thórisson","url":"https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson"},"attachments":[{"id":34736352,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/34736352/thumbnails/1.jpg","file_name":"AAoNL-wHeadr.pdf","download_url":"https://www.academia.edu/attachments/34736352/download_file","bulk_download_file_name":"Autonomous_Acquisition_of_Natural_Langua.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/34736352/AAoNL-wHeadr-libre.pdf?1410768094=\u0026response-content-disposition=attachment%3B+filename%3DAutonomous_Acquisition_of_Natural_Langua.pdf\u0026Expires=1738693944\u0026Signature=JuMm06gxQE1OMJV8Pu5dnLh7XFtXVSZ6opVGpEaMnGvuQZ2FOeLNhiHVL~P8IcBtRmKUktELQjvETUMD5daF9W17hSUfdDUuo5utFip4AtQ6O7XiZ8xszw7OLUk9jjNZoOcvBK0UEoCGwh3d~uPBvjAFCQmL1YsuUazGDbUEnlGYBuHx7xVJObs~hUd1eA9R5wkP88Maniu~~CzBB7KdOPFwzauWpFBtllpGARhdcSKcCE6btyObDyfgUvSPm549SQKYv-Ww~TSlT~eaH~ZUOHKeLmS5oITyXQ1hXxg7effMuejbMAxdttqBzxHd518ksmdrq052x9N76VhaGMdSRg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}],"research_interests":[{"id":77,"name":"Robotics","url":"https://www.academia.edu/Documents/in/Robotics"},{"id":78,"name":"Control Systems Engineering","url":"https://www.academia.edu/Documents/in/Control_Systems_Engineering"},{"id":237,"name":"Cognitive Science","url":"https://www.academia.edu/Documents/in/Cognitive_Science"},{"id":465,"name":"Artificial Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_Intelligence"},{"id":1432,"name":"Natural Language Processing","url":"https://www.academia.edu/Documents/in/Natural_Language_Processing"},{"id":11136,"name":"Cognitive Robotics","url":"https://www.academia.edu/Documents/in/Cognitive_Robotics"},{"id":12428,"name":"Automatic Control","url":"https://www.academia.edu/Documents/in/Automatic_Control"},{"id":14197,"name":"Autonomy","url":"https://www.academia.edu/Documents/in/Autonomy"},{"id":25271,"name":"Artificial General Intelligence","url":"https://www.academia.edu/Documents/in/Artificial_General_Intelligence"}],"urls":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="20159379"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/20159379/Bounded_Recursive_Self_Improvement_RU_TR_13006"><img alt="Research paper thumbnail of Bounded.Recursive.Self.Improvement.RU.TR-13006" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/20159379/Bounded_Recursive_Self_Improvement_RU_TR_13006">Bounded.Recursive.Self.Improvement.RU.TR-13006</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson">Kristinn R. Thórisson</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/EricNivel">Eric Nivel</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://tudelft.academia.edu/CarlosHern%C3%A1ndezCorbato">Carlos Hernández Corbato</a></span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="20159379"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="20159379"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 20159379; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=20159379]").text(description); $(".js-view-count[data-work-id=20159379]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 20159379; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='20159379']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=20159379]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":20159379,"title":"Bounded.Recursive.Self.Improvement.RU.TR-13006","internal_url":"https://www.academia.edu/20159379/Bounded_Recursive_Self_Improvement_RU_TR_13006","owner_id":41269104,"coauthors_can_edit":true,"owner":{"id":41269104,"first_name":"Eric","middle_initials":null,"last_name":"Nivel","page_name":"EricNivel","domain_name":"independent","created_at":"2016-01-11T03:29:37.134-08:00","display_name":"Eric Nivel","url":"https://independent.academia.edu/EricNivel"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="20159382"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/20159382/Bounded_Seed_AGI"><img alt="Research paper thumbnail of Bounded Seed-AGI" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/20159382/Bounded_Seed_AGI">Bounded Seed-AGI</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/EricNivel">Eric Nivel</a>, <a class="" data-click-track="profile-work-strip-authors" href="https://reykjavik.academia.edu/KrisitnnRTh%C3%B3risson">Kristinn R. Thórisson</a>, and <a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Four principal features of autonomous control systems are left both unaddressed and unaddressable...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Four principal features of autonomous control systems are left both unaddressed and unaddressable by present-day engineering methodologies: (1) The ability to operate effectively in environments that are only partially known at design time; (2) A level of generality that allows a system to reassess and redefine the fulfillment of its mission in light of unexpected constraints or other unforeseen changes in the environment; (3) The ability to operate effectively in environments of significant complexity; and (4) The ability to degrade gracefully— how it can continue striving to achieve its main goals when resources become scarce, or in light of other expected or unexpected constraining factors that impede its progress. We describe new methodological and engineering principles for addressing these shortcomings, that we have used to design a machine that becomes increasingly better at behaving in underspecified circumstances, in a goal-directed way, on the job, by modeling itself and i...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="20159382"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="20159382"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 20159382; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=20159382]").text(description); $(".js-view-count[data-work-id=20159382]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 20159382; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='20159382']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=20159382]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":20159382,"title":"Bounded Seed-AGI","internal_url":"https://www.academia.edu/20159382/Bounded_Seed_AGI","owner_id":41269104,"coauthors_can_edit":true,"owner":{"id":41269104,"first_name":"Eric","middle_initials":null,"last_name":"Nivel","page_name":"EricNivel","domain_name":"independent","created_at":"2016-01-11T03:29:37.134-08:00","display_name":"Eric Nivel","url":"https://independent.academia.edu/EricNivel"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441134"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441134/Filling_in_Gaps_in_Textured_Images_Using_Bit_Plane_Statistics"><img alt="Research paper thumbnail of Filling-in Gaps in Textured Images Using Bit-Plane Statistics" class="work-thumbnail" src="https://attachments.academia-assets.com/39509771/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441134/Filling_in_Gaps_in_Textured_Images_Using_Bit_Plane_Statistics">Filling-in Gaps in Textured Images Using Bit-Plane Statistics</a></div><div class="wp-workCard_item"><span>Computer Vision Theory and Applications</span><span>, 2008</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In this paper we propose a novel approach for the texture analysis-synthesis problem, with the pu...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In this paper we propose a novel approach for the texture analysis-synthesis problem, with the purpose to restore missing zones in greyscale images. Bit-plane decomposition is used, and a dictionary is build with bit-blocks statistics for each plane. Gaps are reconstructed with a conditional stochastic process, to propagate texture global features into the damaged area, using information stored in the dictionary. Our restoration method is simple, easy and fast, with very good results for a large set of textured images. Results are compared with a state-of-the-art restoration algorithm.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="11b671494b9919947328f42b71293aae" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509771,"asset_id":17441134,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509771/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441134"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441134"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441134; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441134]").text(description); $(".js-view-count[data-work-id=17441134]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441134; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441134']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "11b671494b9919947328f42b71293aae" } } $('.js-work-strip[data-work-id=17441134]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441134,"title":"Filling-in Gaps in Textured Images Using Bit-Plane Statistics","internal_url":"https://www.academia.edu/17441134/Filling_in_Gaps_in_Textured_Images_Using_Bit_Plane_Statistics","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509771,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509771/thumbnails/1.jpg","file_name":"Filling-in_Gaps_in_Textured_Images_Using20151028-27290-zwcj0i.pdf","download_url":"https://www.academia.edu/attachments/39509771/download_file","bulk_download_file_name":"Filling_in_Gaps_in_Textured_Images_Using.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509771/Filling-in_Gaps_in_Textured_Images_Using20151028-27290-zwcj0i-libre.pdf?1446077458=\u0026response-content-disposition=attachment%3B+filename%3DFilling_in_Gaps_in_Textured_Images_Using.pdf\u0026Expires=1741087463\u0026Signature=hG2DmKsE1eLqV8UQBlPoTaqufnoDMhu1AElAFl84EI~d5yKYa576Zi6CLRchNss3-08tts2gAHl72dAmROvRZTJw2KdK2eCi9vzsdnd8qfjrzJclWn~hle5QrgbCzqqBFokJSUvfsv6ibToY4BMQkzWGecdyfgs4V7WUv-f7h4kr7vdaYOUn6XDI6AHUIjPqvHQ5Ubi8sxuKDzamO6wZyMLmMgql6Ybi0kb6VlaAQK328plRwHbqOe0W1Af4begL~mFC8SuCAq5BKbav8dFDT3MJTZRM9TYWvRoQTd8VPRm07NeuHXc9SDqbS0~A0zmT4AIO0BE-1yGAuHvlbDXtwA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="14525320"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/14525320/The_intentional_stance_as_structure_learning_a_computational_perspective_on_mindreading"><img alt="Research paper thumbnail of The intentional stance as structure learning: a computational perspective on mindreading" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/14525320/The_intentional_stance_as_structure_learning_a_computational_perspective_on_mindreading">The intentional stance as structure learning: a computational perspective on mindreading</a></div><div class="wp-workCard_item wp-workCard--coauthors"><span>by </span><span><a class="" data-click-track="profile-work-strip-authors" href="https://independent.academia.edu/HarisDindo">Haris Dindo</a> and <a class="" data-click-track="profile-work-strip-authors" href="https://cnr-it.academia.edu/FrancescoDonnarumma">Francesco Donnarumma</a></span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Recent theories of mindreading explain the recognition of action, intention, and belief of other ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Recent theories of mindreading explain the recognition of action, intention, and belief of other agents in terms of generative architectures that model the causal relations between observables (e.g., observed movements) and their hidden causes (e.g., action goals and beliefs). Two kinds of probabilistic generative schemes have been proposed in cognitive science and robotics that link to a “theory theory” and “simulation theory” of mindreading, respectively. The former compares perceived actions to optimal plans derived from rationality principles and conceptual theories of others’ minds. The latter reuses one’s own internal (inverse and forward) models for action execution to perform a look-ahead mental simulation of perceived actions. Both theories, however, leave one question unanswered: how are the generative models – including task structure and parameters – learned in the first place? We start from Dennett’s “intentional stance” proposal and characterize it within generative theories of action and intention recognition. We propose that humans use an intentional stance as a learning bias that sidesteps the (hard) structure learning problem and bootstraps the acquisition of generative models for others’ actions. The intentional stance corresponds to a candidate structure in the generative scheme, which encodes a simplified belief-desire folk psychology and a hierarchical intention-to-action organization of behavior. This simple structure can be used as a proxy for the “true” generative structure of others’ actions and intentions and is continuously grown and refined – via state and parameter learning – during interactions. In turn – as our computational simulations show – this can help solve mindreading problems and bootstrap the acquisition of useful causal models of both one’s own and others’ goal-directed actions.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="14525320"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="14525320"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 14525320; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=14525320]").text(description); $(".js-view-count[data-work-id=14525320]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 14525320; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='14525320']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=14525320]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":14525320,"title":"The intentional stance as structure learning: a computational perspective on mindreading","internal_url":"https://www.academia.edu/14525320/The_intentional_stance_as_structure_learning_a_computational_perspective_on_mindreading","owner_id":1770564,"coauthors_can_edit":true,"owner":{"id":1770564,"first_name":"Francesco","middle_initials":null,"last_name":"Donnarumma","page_name":"FrancescoDonnarumma","domain_name":"cnr-it","created_at":"2012-05-17T23:38:07.273-07:00","display_name":"Francesco Donnarumma","url":"https://cnr-it.academia.edu/FrancescoDonnarumma"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441133"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441133/A_Cognitive_Framework_for_Imitation_Learning"><img alt="Research paper thumbnail of A Cognitive Framework for Imitation Learning" class="work-thumbnail" src="https://attachments.academia-assets.com/39509753/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441133/A_Cognitive_Framework_for_Imitation_Learning">A Cognitive Framework for Imitation Learning</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imitation, and not merely reproduc...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imitation, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of cognitive architecture for learning by imitation in which a rich conceptual representation of the observed</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6e63153fa0736694d7cbd23f6a58654f" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509753,"asset_id":17441133,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509753/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441133"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441133"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441133; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441133]").text(description); $(".js-view-count[data-work-id=17441133]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441133; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441133']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6e63153fa0736694d7cbd23f6a58654f" } } $('.js-work-strip[data-work-id=17441133]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441133,"title":"A Cognitive Framework for Imitation Learning","internal_url":"https://www.academia.edu/17441133/A_Cognitive_Framework_for_Imitation_Learning","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509753,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509753/thumbnails/1.jpg","file_name":"j.robot.2006.01.008.pdf20151028-2740-1fok3ko","download_url":"https://www.academia.edu/attachments/39509753/download_file","bulk_download_file_name":"A_Cognitive_Framework_for_Imitation_Lear.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509753/j.robot.2006.01.008-libre.pdf20151028-2740-1fok3ko?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DA_Cognitive_Framework_for_Imitation_Lear.pdf\u0026Expires=1741087463\u0026Signature=RMdZHLb9Oqs6PCI5GHzePz9c3f0q-7J7~rjAslHmH91~FABws6zU-PNwrpCWNQK4ACKCRgu-m09NMgvWAD3HSD~30KEh5Cwks~YfqG-UXv6oIFfycFhsJ4GB236XQMNpxZaJms~IhDD0RWzeOTh5-m7Ge~I0F4F0~igwGygwRw2TJu9BxAu8wneCYih-55VsYuU5Aty0kbdUVf3jXVAawufUQSSgNS5poRHtTv-9zLkPokXKiXF9phNlahzCM2tdjhPKIGTz5KBeSB-AlS3dkFOvHVF-poT8cIB1Zt19CbrID0VI6HqFRgdHTnmi~U5R5pbBllQMswR7qR697DeBug__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441132"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441132/A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGES_AFFECTED_BY_FOXING"><img alt="Research paper thumbnail of A SET OF LOW-LEVEL DESCRIPTORS FOR IMAGES AFFECTED BY FOXING" class="work-thumbnail" src="https://attachments.academia-assets.com/39509759/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441132/A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGES_AFFECTED_BY_FOXING">A SET OF LOW-LEVEL DESCRIPTORS FOR IMAGES AFFECTED BY FOXING</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">Old printed photos are affected by several typical damages, due to age and bad preservation. &quo...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">Old printed photos are affected by several typical damages, due to age and bad preservation. &quot;Foxing&quot; defects look like red-brownish spots onto the paper of the printed photo. Similar features can be seen in the digitized copies. In this paper we propose a set of low level descriptors to extract features from digitized photos affected by foxing. An image retrieval</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="99b08d18007771151084f98634563059" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509759,"asset_id":17441132,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509759/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441132"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441132"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441132; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441132]").text(description); $(".js-view-count[data-work-id=17441132]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441132; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441132']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "99b08d18007771151084f98634563059" } } $('.js-work-strip[data-work-id=17441132]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441132,"title":"A SET OF LOW-LEVEL DESCRIPTORS FOR IMAGES AFFECTED BY FOXING","internal_url":"https://www.academia.edu/17441132/A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGES_AFFECTED_BY_FOXING","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509759,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509759/thumbnails/1.jpg","file_name":"A_set_of_low-level_descriptors_for_image20151028-18389-1xtbeaq.pdf","download_url":"https://www.academia.edu/attachments/39509759/download_file","bulk_download_file_name":"A_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGE.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509759/A_set_of_low-level_descriptors_for_image20151028-18389-1xtbeaq-libre.pdf?1446077458=\u0026response-content-disposition=attachment%3B+filename%3DA_SET_OF_LOW_LEVEL_DESCRIPTORS_FOR_IMAGE.pdf\u0026Expires=1741087463\u0026Signature=fnvsOsjrWVsg5odTweApNbFzcScTX07G1p8Dvkm-GGD78WJJZJaKS5HmgrDaHdVkMHs4MUwBaQaKUC25~pExbZnUOQGA86eELd0Jg27YvjO0dxQyyzctL~oJiux3mx56OKHxrrlZDGaYmrE-HqWB-WBbFeyySJptDtQucJR4Y2x60Zd-~e2vQh3spdw8zyImrXzVJsiqfyJ5mz6626nXGivMxD1sEKuxVBZeEbWurQTqdNQFyq3cfP5kUbg4~Vc6BBXEPZMVEcSzKjXEEMxHZVi8Xy--cdsvUrgeb2OfSOvM9i3-WibLvAzMYUBV~7w81HnAWjJ84eJbvMPDm5o0rg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441131"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441131/Real_Time_Visual_Grasp_Synthesis_Using_Genetic_Algorithms_and_Neural_Networks"><img alt="Research paper thumbnail of Real-Time Visual Grasp Synthesis Using Genetic Algorithms and Neural Networks" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441131/Real_Time_Visual_Grasp_Synthesis_Using_Genetic_Algorithms_and_Neural_Networks">Real-Time Visual Grasp Synthesis Using Genetic Algorithms and Neural Networks</a></div><div class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2007</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">This paper addresses the problem of automatic grasp synthesis of unknown planar objects. In other...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">This paper addresses the problem of automatic grasp synthesis of unknown planar objects. In other words, we must compute points on the object&amp;amp;amp;amp;amp;amp;amp;#x27;s boundary to be reached by the robotic fingers such that the resulting grasp, among infinite possibilities, optimizes some given criteria. Objects to be grasped are represented as superellipses, a family of deformable 2D parametric functions. They can model a large variety of shapes occurring often in practice by changing a small number of parameters. The space of possible grasp ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441131"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441131"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441131; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441131]").text(description); $(".js-view-count[data-work-id=17441131]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441131; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441131']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441131]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441131,"title":"Real-Time Visual Grasp Synthesis Using Genetic Algorithms and Neural Networks","internal_url":"https://www.academia.edu/17441131/Real_Time_Visual_Grasp_Synthesis_Using_Genetic_Algorithms_and_Neural_Networks","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441130"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441130/An_architecture_for_observational_learning_and_decision_making_based_on_internal_models"><img alt="Research paper thumbnail of An architecture for observational learning and decision making based on internal models" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441130/An_architecture_for_observational_learning_and_decision_making_based_on_internal_models">An architecture for observational learning and decision making based on internal models</a></div><div class="wp-workCard_item"><span>Biologically Inspired Cognitive Architectures</span><span>, 2013</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">ABSTRACT We present a cognitive architecture whose main constituents are allowed to grow through ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">ABSTRACT We present a cognitive architecture whose main constituents are allowed to grow through a situated experience in the world. Such an architectural growth is bootstrapped from a minimal initial knowledge and the architecture itself is built around the biologically-inspired notion of internal models. The key idea, supported by findings in cognitive neuroscience, is that the same internal models used in overt goal-directed action execution can be covertly re-enacted in simulation to provide a unifying explanation to a number of apparently unrelated individual and social phenomena, such as state estimation, action and intention understanding, imitation learning and mindreading. Thus, rather than reasoning over abstract symbols, we rely on the biologically plausible processes firmly grounded in the actual sensorimotor experience of the agent. The article describes how such internal models are learned in the first place, either through individual experience or by observing and imitating other skilled agents, and how they are used in action planning and execution. Furthermore, we explain how the architecture continuously adapts its internal agency and how increasingly complex cognitive phenomena, such as continuous learning, prediction and anticipation, result from an interplay of simpler principles. We describe an early evaluation of our approach in a classical AI problem-solving domain: the Sokoban puzzle.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441130"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441130"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441130; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441130]").text(description); $(".js-view-count[data-work-id=17441130]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441130; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441130']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441130]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441130,"title":"An architecture for observational learning and decision making based on internal models","internal_url":"https://www.academia.edu/17441130/An_architecture_for_observational_learning_and_decision_making_based_on_internal_models","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441129"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441129/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces"><img alt="Research paper thumbnail of Anchoring by Imitation Learning in Conceptual Spaces" class="work-thumbnail" src="https://attachments.academia-assets.com/39509778/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441129/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces">Anchoring by Imitation Learning in Conceptual Spaces</a></div><div class="wp-workCard_item"><span>Congress of the Italian Association for Artificial Intelligence</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imita- tion, and not merely reprod...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imita- tion, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of a cognitive architecture for learning by imitation in which a rich conceptual representation of</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="cd7cf2ebc5b3c286f411c5e1fd88863e" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509778,"asset_id":17441129,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509778/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441129"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441129"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441129; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441129]").text(description); $(".js-view-count[data-work-id=17441129]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441129; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441129']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "cd7cf2ebc5b3c286f411c5e1fd88863e" } } $('.js-work-strip[data-work-id=17441129]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441129,"title":"Anchoring by Imitation Learning in Conceptual Spaces","internal_url":"https://www.academia.edu/17441129/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509778,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509778/thumbnails/1.jpg","file_name":"Anchoring_by_Imitation_Learning_in_Conce20151028-27290-1d5ckys.pdf","download_url":"https://www.academia.edu/attachments/39509778/download_file","bulk_download_file_name":"Anchoring_by_Imitation_Learning_in_Conce.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509778/Anchoring_by_Imitation_Learning_in_Conce20151028-27290-1d5ckys-libre.pdf?1446077458=\u0026response-content-disposition=attachment%3B+filename%3DAnchoring_by_Imitation_Learning_in_Conce.pdf\u0026Expires=1741087463\u0026Signature=Psn2wMP4IUhhUiMpLkqRAsW28BQyXRKQ0VuGQug7mudkW4O6cftWx8zMM2L3po7eSBrRWRGso0P1AbwHrcsDnAxo0Hepa8YFDJfb1BeDLXDp9hSRxVItLg8~DfV5CqvIR9ryJfMiZnWEscW4x3yzhPf1aMdIY~AHRorEAAMkRuhRbgSmei5LO~oghAg1rbXuOGQbks8ZKwyKQWa1ZwSehPuQ1KkBAZu9V8XzwlNWmu0UJlHRg2HYbxqZ7TOTzwiHElf5KEvLDH8ISUfHL3G1j-i4foybjRiJu2EdnuL6CYUSoiLX7AA6nPHljFO9meFl74On6Dlp4wFYzyLUJrhRCQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441128"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441128/An_adaptive_probabilistic_graphical_model_for_representing_skills_in_PbD_settings"><img alt="Research paper thumbnail of An adaptive probabilistic graphical model for representing skills in PbD settings" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441128/An_adaptive_probabilistic_graphical_model_for_representing_skills_in_PbD_settings">An adaptive probabilistic graphical model for representing skills in PbD settings</a></div><div class="wp-workCard_item"><span>2010 5th ACM/IEEE International Conference on Human-Robot Interaction (HRI)</span><span>, 2010</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">AbstractUnderstanding and efficiently representing skills is one of the most important problems ...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">AbstractUnderstanding and efficiently representing skills is one of the most important problems in a general Programming by Demonstration (PbD) paradigm. We present Growing Hierarchi-cal Dynamic Bayesian Networks (GHDBN), an adaptive variant of the general ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441128"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441128"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441128; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441128]").text(description); $(".js-view-count[data-work-id=17441128]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441128; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441128']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441128]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441128,"title":"An adaptive probabilistic graphical model for representing skills in PbD settings","internal_url":"https://www.academia.edu/17441128/An_adaptive_probabilistic_graphical_model_for_representing_skills_in_PbD_settings","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441127"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441127/Learning_high_level_tasks_through_imitation"><img alt="Research paper thumbnail of Learning high-level tasks through imitation" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441127/Learning_high_level_tasks_through_imitation">Learning high-level tasks through imitation</a></div><div class="wp-workCard_item"><span>International Conference on Intelligent RObots and Systems - IROS</span><span>, 2006</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">This paper presents the cognitive architecture Con-SCIS (conceptual space based cognitive imitati...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">This paper presents the cognitive architecture Con-SCIS (conceptual space based cognitive imitation system), which tightly links low-level data processing with knowledge representation in the context of imitation learning. We use the word imitate to refer to the paradigm of program-level imitation: we are interested in the final effects of actions on objects, and not on the particular kinematic or dynamic</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441127"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441127"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441127; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441127]").text(description); $(".js-view-count[data-work-id=17441127]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441127; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441127']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441127]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441127,"title":"Learning high-level tasks through imitation","internal_url":"https://www.academia.edu/17441127/Learning_high_level_tasks_through_imitation","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441126"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441126/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces"><img alt="Research paper thumbnail of Anchoring by Imitation Learning in Conceptual Spaces" class="work-thumbnail" src="https://attachments.academia-assets.com/39509761/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441126/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces">Anchoring by Imitation Learning in Conceptual Spaces</a></div><div class="wp-workCard_item"><span>Lecture Notes in Computer Science</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imitation, and not merely reproduc...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imitation, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of a cognitive architecture for learning by imitation in which a rich conceptual representation of the observed actions is built. The purpose of the following discussion is to show how the same conceptual representation can be used both in a bottom-up approach, in order to learn sequences of actions by imitation learning paradigm, and in a topdown approach, in order to anchor the symbolical representations to the perceptual activities of the robotic system. The proposed architecture has been tested on the robotic system composed of a PUMA 200 industrial manipulator and an anthropomorphic robotic hand. The system demonstrated the ability to learn and imitate a set of movement primitives acquired through the vision system for simple manipulative purposes.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="5b8e391db144bfe709f80506b4d6bfa0" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509761,"asset_id":17441126,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509761/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441126"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441126"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441126; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441126]").text(description); $(".js-view-count[data-work-id=17441126]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441126; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441126']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "5b8e391db144bfe709f80506b4d6bfa0" } } $('.js-work-strip[data-work-id=17441126]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441126,"title":"Anchoring by Imitation Learning in Conceptual Spaces","internal_url":"https://www.academia.edu/17441126/Anchoring_by_Imitation_Learning_in_Conceptual_Spaces","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509761,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509761/thumbnails/1.jpg","file_name":"Anchoring_by_Imitation_Learning_in_Conce20151028-16072-6vu3zb.pdf","download_url":"https://www.academia.edu/attachments/39509761/download_file","bulk_download_file_name":"Anchoring_by_Imitation_Learning_in_Conce.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509761/Anchoring_by_Imitation_Learning_in_Conce20151028-16072-6vu3zb-libre.pdf?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DAnchoring_by_Imitation_Learning_in_Conce.pdf\u0026Expires=1741087463\u0026Signature=IIfNXlb5mA2thfuDfQ1VeuwSlCBvveIogmpdf07Z7SQVQaLt1Gv1mqupM~j6viHTQueh6iqe9vncV2DVIABbQaOQryqtdoJSEC2NqBnDFET6skyHDNsxivpRH24rSmQyCHozKYPUqMA42Iqx~Rs7pPMc4N8F8gRc8t-Hs28~EEoYnITrUq2MrTrivhHCPg-I4PbjohBkrfNKpkt7-9wNuJTEKMmOuB9mxR9JtKBthIYLUJ20Kdg1kOkcy7F~aKSmIB6FvfLWBnhq~6NmLkAiByvVobWGg7oVuwy-Zmx9bXBORLCBwmSx1keTM6gncvThDUaVRmKeUUFWyP7e~tut7A__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441125"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441125/A_cognitive_approach_to_goal_level_imitation"><img alt="Research paper thumbnail of A cognitive approach to goal-level imitation" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441125/A_cognitive_approach_to_goal_level_imitation">A cognitive approach to goal-level imitation</a></div><div class="wp-workCard_item"><span>Interaction Studies</span><span>, 2008</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">... 306 Antonio Chella, Haris Dindo and Ignazio Infantino of each detected object. Objects are mo...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">... 306 Antonio Chella, Haris Dindo and Ignazio Infantino of each detected object. Objects are modelled with superellipses, a natural generali-sation of ellipses. They can model a large variety of regular shapes by changing a small number of parameters (Jaklic, Leonardis, &amp;amp;amp;amp; ...</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441125"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441125"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441125; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441125]").text(description); $(".js-view-count[data-work-id=17441125]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441125; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441125']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441125]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441125,"title":"A cognitive approach to goal-level imitation","internal_url":"https://www.academia.edu/17441125/A_cognitive_approach_to_goal_level_imitation","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441124"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441124/People_Tracking_and_Posture_Recognition_for_Human_Robot_Interaction"><img alt="Research paper thumbnail of People Tracking and Posture Recognition for Human-Robot Interaction" class="work-thumbnail" src="https://attachments.academia-assets.com/39509726/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441124/People_Tracking_and_Posture_Recognition_for_Human_Robot_Interaction">People Tracking and Posture Recognition for Human-Robot Interaction</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The paper deals with a system for simultaneous people tracking and posture recognition in clutter...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The paper deals with a system for simultaneous people tracking and posture recognition in cluttered environments in the context of human-robot interaction. We adopt no particular assumptions on the movement of a person nor on its appearance, making the system suitable to several real-world applications. The system can be roughly subdivided into two highly correlated phases: tracking and recognition. The</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="c15573169ce77e0e5db28a28bb6788f2" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509726,"asset_id":17441124,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509726/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441124"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441124"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441124; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441124]").text(description); $(".js-view-count[data-work-id=17441124]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441124; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441124']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "c15573169ce77e0e5db28a28bb6788f2" } } $('.js-work-strip[data-work-id=17441124]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441124,"title":"People Tracking and Posture Recognition for Human-Robot Interaction","internal_url":"https://www.academia.edu/17441124/People_Tracking_and_Posture_Recognition_for_Human_Robot_Interaction","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509726,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509726/thumbnails/1.jpg","file_name":"HRI06_3.pdf","download_url":"https://www.academia.edu/attachments/39509726/download_file","bulk_download_file_name":"People_Tracking_and_Posture_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509726/HRI06_3-libre.pdf?1446077463=\u0026response-content-disposition=attachment%3B+filename%3DPeople_Tracking_and_Posture_Recognition.pdf\u0026Expires=1741087463\u0026Signature=WXClnpoe0CXMr52565cqMTDHXqI~Z4kwxpWCHZMEg1cG7D2vFU2Uh9CMn-iveXqGv3~bxQh1zVUsSveSNW2B5BXHlG5NJHbgyPbz3uQ5GS9sOM6GHcSUERMr9hxAbAsMzSpxeEvLvr4dJ95GCvt06GmUZhqORCZoNoQjExfUgt2oXa4iLGwaN-Mk1dQlvToz~FlciJR9-1Ai6GzmjuW4PpcWz~4Lu6BJKbKjlcd6z0m4qF5C7GO0Jrq453aPkHclc9QPa6O~vQuQNiigosmN57BC--JfmSTC3ch0SPV23v4KEsQFWKamDdXcV1Ze9RRl9e6GfqplXWTDbLdCFL4kkg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"},{"id":39509727,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509727/thumbnails/1.jpg","file_name":"HRI06_3.pdf","download_url":"https://www.academia.edu/attachments/39509727/download_file","bulk_download_file_name":"People_Tracking_and_Posture_Recognition.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509727/HRI06_3-libre.pdf?1446077463=\u0026response-content-disposition=attachment%3B+filename%3DPeople_Tracking_and_Posture_Recognition.pdf\u0026Expires=1741087463\u0026Signature=PryQ~8Y2frEF6QdcudCylZvVhcka1-DISUHhkmPnR4ETLEGvb-RzAh6VY-kwXmcXLjbaLagkgglEhLdxFjxNgPeQ2Dgo0SuKX~jeCi5~nXBJGgfonvITve1B9jMdel9AHLkwk88AdxljIBBv7K-OaGfLFqpHtj5Bty6wEwE7HFxCLsZqMEiPIZvXT0Dm0-vn0NHJCxRE97D9j2qayYg3DLRsfcJUFme0NxJcw57FKJpNJlVU~8GMTGMfasv3-3EqsMG5AUiW~EYV4GxzuPYGU95TPyX4-qWBn5RO0r5vOie95U8tybZTuPoOIgGXpht3Z4YrNNhZJMmvW-eVnps9rA__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441123"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441123/A_Cognitive_Framework_for_Learning_by_Imitation"><img alt="Research paper thumbnail of A Cognitive Framework for Learning by Imitation" class="work-thumbnail" src="https://attachments.academia-assets.com/39509766/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441123/A_Cognitive_Framework_for_Learning_by_Imitation">A Cognitive Framework for Learning by Imitation</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">In order to have a robotic system able to effectively learn by imitation, and not merely reproduc...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">In order to have a robotic system able to effectively learn by imitation, and not merely reproduce the movements of a human teacher, the system should have the capabilities of deeply understanding the perceived actions to be imitated. This paper deals with the development of a cognitive framework for learning by imitation in which a rich conceptual representation of the</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="6fc31d6538ebc10f7dc5c7d7eea550fe" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509766,"asset_id":17441123,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509766/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441123"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441123"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441123; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441123]").text(description); $(".js-view-count[data-work-id=17441123]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441123; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441123']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "6fc31d6538ebc10f7dc5c7d7eea550fe" } } $('.js-work-strip[data-work-id=17441123]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441123,"title":"A Cognitive Framework for Learning by Imitation","internal_url":"https://www.academia.edu/17441123/A_Cognitive_Framework_for_Learning_by_Imitation","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509766,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509766/thumbnails/1.jpg","file_name":"A_Cognitive_Framework_for_Learning_by_Im20151028-27290-1emzm9y.pdf","download_url":"https://www.academia.edu/attachments/39509766/download_file","bulk_download_file_name":"A_Cognitive_Framework_for_Learning_by_Im.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509766/A_Cognitive_Framework_for_Learning_by_Im20151028-27290-1emzm9y-libre.pdf?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DA_Cognitive_Framework_for_Learning_by_Im.pdf\u0026Expires=1741087463\u0026Signature=ISCYAHtXa0NmZEyzfKfi0WtifGdfo9Tj9qnrr7fg9Ha0E-FzGmvCqRd7X01LrHRcWEcXaYXC5dCxdFJ9VYqCpL7Wq22kRg4fwaLjrpVW9q8X2X2y~pBJynDbR1wTXQl9-r5bQhNm~y~8Nj9kxeQYFlpKC6I7MsRrlYY7-m~3CIdo~xTgPoq7noWoskybFEcjvZV9sdegfCBL9OUDMRBm4Ls6HkVY7aP6OL7TizpYM4HuPS-FjlCGD6eGOUrEyqCSdXrEUp~pKdJuqOZUmlAUn6T7QQY2gKV~HaCdDu~OvE7f5Irs23weAV9-QFUvzYS0nZZ330hAqAqQOllutxOKVw__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441122"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441122/A_System_for_Simultaneous_People_Tracking_and_Posture_Recognition_in_the_context_of_Human_Computer_Interaction"><img alt="Research paper thumbnail of A System for Simultaneous People Tracking and Posture Recognition in the context of Human-Computer Interaction" class="work-thumbnail" src="https://attachments.academia-assets.com/42259707/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441122/A_System_for_Simultaneous_People_Tracking_and_Posture_Recognition_in_the_context_of_Human_Computer_Interaction">A System for Simultaneous People Tracking and Posture Recognition in the context of Human-Computer Interaction</a></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The paper deals with an artificial-vision based system for simultaneous people tracking and postu...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The paper deals with an artificial-vision based system for simultaneous people tracking and posture recognition in the context of human-computer interaction. We adopt no particular assumptions on the movement of a person and on its appearance, making the system suitable to several real-world applications. The system can be roughly subdivided into two highly correlated phases: tracking and recognition. The tracking</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="81d0ba063ef4e6cbb1081661c3a5bfe7" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":42259707,"asset_id":17441122,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/42259707/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441122"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441122"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441122; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441122]").text(description); $(".js-view-count[data-work-id=17441122]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441122; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441122']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "81d0ba063ef4e6cbb1081661c3a5bfe7" } } $('.js-work-strip[data-work-id=17441122]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441122,"title":"A System for Simultaneous People Tracking and Posture Recognition in the context of Human-Computer Interaction","internal_url":"https://www.academia.edu/17441122/A_System_for_Simultaneous_People_Tracking_and_Posture_Recognition_in_the_context_of_Human_Computer_Interaction","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":42259707,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/42259707/thumbnails/1.jpg","file_name":"EURCON.2005.1630114.pdf20160206-25462-m65j31","download_url":"https://www.academia.edu/attachments/42259707/download_file","bulk_download_file_name":"A_System_for_Simultaneous_People_Trackin.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/42259707/EURCON.2005.1630114-libre.pdf20160206-25462-m65j31?1454824468=\u0026response-content-disposition=attachment%3B+filename%3DA_System_for_Simultaneous_People_Trackin.pdf\u0026Expires=1741087464\u0026Signature=VDg-PwEZzE2WAupQ0GfAPNeouXawsmhTD7veYxt8DxiMJLiW~xrQ-7LMDeHGihyUSGbYJ2LYnf5lSYvg7terNIfbT5W-YKVv5cWh~UjD3H8q1iz3QLGX1evTa3LmlIV7YVtaXsvZFrWLGjmCB9fMFW9LPfRrRQUed3hCdhZMB20m25ubn6lWtfA2uVWx8mDj2uuy5W4te-KzmZ0WRkmdvkRa-ACCR05IAzDJwLSxyu94oAVLEclV93cvL5GxxAQ~mIo4g8TvzqE7Ou1dqQtQhAAxu6qzzJPRSYLsoMXGuwQJH4dX6tVnqh~IYnkNOFcsu50xejXYh3rjqJ0DvZgZaQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441121"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441121/A_cognitive_architecture_for_Robotic_hand_posture_learning"><img alt="Research paper thumbnail of A cognitive architecture for Robotic hand posture learning" class="work-thumbnail" src="https://attachments.academia-assets.com/39509757/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441121/A_cognitive_architecture_for_Robotic_hand_posture_learning">A cognitive architecture for Robotic hand posture learning</a></div><div class="wp-workCard_item"><span>IEEE Transactions on Systems, Man, and Cybernetics</span><span>, 2005</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">The paper deals with the design and implementation of a visual control of a robotic system compos...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">The paper deals with the design and implementation of a visual control of a robotic system composed of a dexterous hand and a video camera. The aim of the proposed system is to reproduce the movements of a human hand in order to learn complex manipulation tasks or to interact with the user. A novel algorithm for a robust and</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="9c0d7605df4a493c8b50566374c162d7" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509757,"asset_id":17441121,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509757/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441121"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441121"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441121; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441121]").text(description); $(".js-view-count[data-work-id=17441121]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441121; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441121']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "9c0d7605df4a493c8b50566374c162d7" } } $('.js-work-strip[data-work-id=17441121]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441121,"title":"A cognitive architecture for Robotic hand posture learning","internal_url":"https://www.academia.edu/17441121/A_cognitive_architecture_for_Robotic_hand_posture_learning","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509757,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509757/thumbnails/1.jpg","file_name":"A_cognitive_architecture_for_Robotic_han20151028-7871-vvafqb.pdf","download_url":"https://www.academia.edu/attachments/39509757/download_file","bulk_download_file_name":"A_cognitive_architecture_for_Robotic_han.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509757/A_cognitive_architecture_for_Robotic_han20151028-7871-vvafqb-libre.pdf?1446077463=\u0026response-content-disposition=attachment%3B+filename%3DA_cognitive_architecture_for_Robotic_han.pdf\u0026Expires=1741087464\u0026Signature=RnhKYcnjNYf37azFIbbFZoMjg4d7WfB7gKKCDN45pmiRloNb6mVLSXr4h1GfwOq-khkDcd6oisJKXybzTlPe36WZAyJkhs59PtyidkkLIA2aTe2Bn6~cMaId42-raBVt8-xT8i2d7hyKlBjZyVC82iHpBfq8aUk0EHrLeo2Ob46B-S6byDccMhK26haHgZxlCBQeUtxy5o3UbDiqt-zwEPWEIuCFCoqZ7cpVX5C6ZhT~sOF~shGmGXQVZg2gRBC7KF~9~8j9qLTn32EhooSMYwKJ6bj8turBU8h0tCTM5u8fwHu7-F2JR0uq0PfYF5zG-IjILZBpUvCdVuYWG78rdg__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441120"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" href="https://www.academia.edu/17441120/Discriminating_and_simulating_actions_with_the_associative_self_organising_map"><img alt="Research paper thumbnail of Discriminating and simulating actions with the associative self-organising map" class="work-thumbnail" src="https://attachments.academia-assets.com/39509776/thumbnails/1.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" href="https://www.academia.edu/17441120/Discriminating_and_simulating_actions_with_the_associative_self_organising_map">Discriminating and simulating actions with the associative self-organising map</a></div><div class="wp-workCard_item"><span>Connection Science</span><span>, 2015</span></div><div class="wp-workCard_item"><span class="js-work-more-abstract-truncated">We propose a system able to represent others' actions as well as to internally simulate their lik...</span><a class="js-work-more-abstract" data-broccoli-component="work_strip.more_abstract" data-click-track="profile-work-strip-more-abstract" href="javascript:;"><span> more </span><span><i class="fa fa-caret-down"></i></span></a><span class="js-work-more-abstract-untruncated hidden">We propose a system able to represent others' actions as well as to internally simulate their likely continuation from a partial observation. The approach presented here is the first step towards a more ambitious goal of endowing an artificial agent with the ability to recognise and predict others' intentions. Our approach is based on the associative self-organising map, a variant of the self-organising map capable of learning to associate its activity with different inputs over time, where inputs are processed observations of others' actions. We have evaluated our system in two different experimental scenarios obtaining promising results: the system demonstrated an ability to learn discriminable representations of actions, to recognise novel input, and to simulate the likely continuation of partially seen actions.</span></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><a id="a7aaf2fadef48f492219af07e51ffaaa" class="wp-workCard--action" rel="nofollow" data-click-track="profile-work-strip-download" data-download="{"attachment_id":39509776,"asset_id":17441120,"asset_type":"Work","button_location":"profile"}" href="https://www.academia.edu/attachments/39509776/download_file?s=profile"><span><i class="fa fa-arrow-down"></i></span><span>Download</span></a><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441120"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441120"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441120; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441120]").text(description); $(".js-view-count[data-work-id=17441120]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441120; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441120']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (true){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "a7aaf2fadef48f492219af07e51ffaaa" } } $('.js-work-strip[data-work-id=17441120]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441120,"title":"Discriminating and simulating actions with the associative self-organising map","internal_url":"https://www.academia.edu/17441120/Discriminating_and_simulating_actions_with_the_associative_self_organising_map","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[{"id":39509776,"title":"","file_type":"pdf","scribd_thumbnail_url":"https://attachments.academia-assets.com/39509776/thumbnails/1.jpg","file_name":"Discriminating_and_simulating_actions_wi20151028-16066-1frmq5q.pdf","download_url":"https://www.academia.edu/attachments/39509776/download_file","bulk_download_file_name":"Discriminating_and_simulating_actions_wi.pdf","bulk_download_url":"https://d1wqtxts1xzle7.cloudfront.net/39509776/Discriminating_and_simulating_actions_wi20151028-16066-1frmq5q-libre.pdf?1446077459=\u0026response-content-disposition=attachment%3B+filename%3DDiscriminating_and_simulating_actions_wi.pdf\u0026Expires=1741087464\u0026Signature=VAbwVMqxyLRuj3G-lrYCe1cU8HGrzDOWBz9BdPhqzYJm~-YHszX0nAVUELO-XGE1X4FmYnIvnFnP-N4ah5EQNCz~9yn0nvkvYKpOES6EaUDNhWfuVz7q2oUJiaebz46cfV4F-bzNWzUAb2srD-aESietiTN9ehhHYIWvDH8MeBZce-irLwJDmhuo8MJ7q2wOpxnllco672U9S~ewvNPlxIwZ4Sm2KtvxVuQk5fSqG-ShGzs4ZBWRjI6vie81Vwdu8M9nY09rNKrnTuYTGFeHiMZuCzYLqx6PQFVV9tdR8DRm-zwv~8NIszPi2rKCutRo-ZmZSfl2DhntYyKMgU9ThQ__\u0026Key-Pair-Id=APKAJLOHF5GGSLRBV4ZA"}]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> <div class="js-work-strip profile--work_container" data-work-id="17441119"><div class="profile--work_thumbnail hidden-xs"><a class="js-work-strip-work-link" data-click-track="profile-work-strip-thumbnail" rel="nofollow" href="https://www.academia.edu/17441119/Bounded_Recursive_Self_Improvement_RU_TR_13006"><img alt="Research paper thumbnail of Bounded.Recursive.Self.Improvement.RU.TR-13006" class="work-thumbnail" src="https://a.academia-assets.com/images/blank-paper.jpg" /></a></div><div class="wp-workCard wp-workCard_itemContainer"><div class="wp-workCard_item wp-workCard--title"><a class="js-work-strip-work-link text-gray-darker" data-click-track="profile-work-strip-title" rel="nofollow" href="https://www.academia.edu/17441119/Bounded_Recursive_Self_Improvement_RU_TR_13006">Bounded.Recursive.Self.Improvement.RU.TR-13006</a></div><div class="wp-workCard_item wp-workCard--actions"><span class="work-strip-bookmark-button-container"></span><span class="wp-workCard--action visible-if-viewed-by-owner inline-block" style="display: none;"><span class="js-profile-work-strip-edit-button-wrapper profile-work-strip-edit-button-wrapper" data-work-id="17441119"><a class="js-profile-work-strip-edit-button" tabindex="0"><span><i class="fa fa-pencil"></i></span><span>Edit</span></a></span></span></div><div class="wp-workCard_item wp-workCard--stats"><span><span><span class="js-view-count view-count u-mr2x" data-work-id="17441119"><i class="fa fa-spinner fa-spin"></i></span><script>$(function () { var workId = 17441119; window.Academia.workViewCountsFetcher.queue(workId, function (count) { var description = window.$h.commaizeInt(count) + " " + window.$h.pluralize(count, 'View'); $(".js-view-count[data-work-id=17441119]").text(description); $(".js-view-count[data-work-id=17441119]").attr('title', description).tooltip(); }); });</script></span></span><span><span class="percentile-widget hidden"><span class="u-mr2x work-percentile"></span></span><script>$(function () { var workId = 17441119; window.Academia.workPercentilesFetcher.queue(workId, function (percentileText) { var container = $(".js-work-strip[data-work-id='17441119']"); container.find('.work-percentile').text(percentileText.charAt(0).toUpperCase() + percentileText.slice(1)); container.find('.percentile-widget').show(); container.find('.percentile-widget').removeClass('hidden'); }); });</script></span></div><div id="work-strip-premium-row-container"></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/work_edit-ad038b8c047c1a8d4fa01b402d530ff93c45fee2137a149a4a5398bc8ad67560.js"], function() { // from javascript_helper.rb var dispatcherData = {} if (false){ window.WowProfile.dispatcher = window.WowProfile.dispatcher || _.clone(Backbone.Events); dispatcherData = { dispatcher: window.WowProfile.dispatcher, downloadLinkId: "-1" } } $('.js-work-strip[data-work-id=17441119]').each(function() { if (!$(this).data('initialized')) { new WowProfile.WorkStripView({ el: this, workJSON: {"id":17441119,"title":"Bounded.Recursive.Self.Improvement.RU.TR-13006","internal_url":"https://www.academia.edu/17441119/Bounded_Recursive_Self_Improvement_RU_TR_13006","owner_id":37173809,"coauthors_can_edit":true,"owner":{"id":37173809,"first_name":"Haris","middle_initials":null,"last_name":"Dindo","page_name":"HarisDindo","domain_name":"independent","created_at":"2015-10-28T16:44:45.446-07:00","display_name":"Haris Dindo","url":"https://independent.academia.edu/HarisDindo"},"attachments":[]}, dispatcherData: dispatcherData }); $(this).data('initialized', true); } }); $a.trackClickSource(".js-work-strip-work-link", "profile_work_strip") }); </script> </div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js","https://a.academia-assets.com/assets/google_contacts-0dfb882d836b94dbcb4a2d123d6933fc9533eda5be911641f20b4eb428429600.js"], function() { // from javascript_helper.rb $('.js-google-connect-button').click(function(e) { e.preventDefault(); GoogleContacts.authorize_and_show_contacts(); Aedu.Dismissibles.recordClickthrough("WowProfileImportContactsPrompt"); }); $('.js-update-biography-button').click(function(e) { e.preventDefault(); Aedu.Dismissibles.recordClickthrough("UpdateUserBiographyPrompt"); $.ajax({ url: $r.api_v0_profiles_update_about_path({ subdomain_param: 'api', about: "", }), type: 'PUT', success: function(response) { location.reload(); } }); }); $('.js-work-creator-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_document_path({ source: encodeURIComponent(""), }); }); $('.js-video-upload-button').click(function (e) { e.preventDefault(); window.location = $r.upload_funnel_video_path({ source: encodeURIComponent(""), }); }); $('.js-do-this-later-button').click(function() { $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("WowProfileImportContactsPrompt"); }); $('.js-update-biography-do-this-later-button').click(function(){ $(this).closest('.js-profile-nag-panel').remove(); Aedu.Dismissibles.recordDismissal("UpdateUserBiographyPrompt"); }); $('.wow-profile-mentions-upsell--close').click(function(){ $('.wow-profile-mentions-upsell--panel').hide(); Aedu.Dismissibles.recordDismissal("WowProfileMentionsUpsell"); }); $('.wow-profile-mentions-upsell--button').click(function(){ Aedu.Dismissibles.recordClickthrough("WowProfileMentionsUpsell"); }); new WowProfile.SocialRedesignUserWorks({ initialWorksOffset: 20, allWorksOffset: 20, maxSections: 1 }) }); </script> </div></div></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/wow_profile_edit-5ea339ee107c863779f560dd7275595239fed73f1a13d279d2b599a28c0ecd33.js","https://a.academia-assets.com/assets/add_coauthor-22174b608f9cb871d03443cafa7feac496fb50d7df2d66a53f5ee3c04ba67f53.js","https://a.academia-assets.com/assets/tab-dcac0130902f0cc2d8cb403714dd47454f11fc6fb0e99ae6a0827b06613abc20.js","https://a.academia-assets.com/assets/wow_profile-a9bf3a2bc8c89fa2a77156577594264ee8a0f214d74241bc0fcd3f69f8d107ac.js"], function() { // from javascript_helper.rb window.ae = window.ae || {}; window.ae.WowProfile = window.ae.WowProfile || {}; if(Aedu.User.current && Aedu.User.current.id === $viewedUser.id) { window.ae.WowProfile.current_user_edit = {}; new WowProfileEdit.EditUploadView({ el: '.js-edit-upload-button-wrapper', model: window.$current_user, }); new AddCoauthor.AddCoauthorsController(); } var userInfoView = new WowProfile.SocialRedesignUserInfo({ recaptcha_key: "6LdxlRMTAAAAADnu_zyLhLg0YF9uACwz78shpjJB" }); WowProfile.router = new WowProfile.Router({ userInfoView: userInfoView }); Backbone.history.start({ pushState: true, root: "/" + $viewedUser.page_name }); new WowProfile.UserWorksNav() }); </script> </div> <div class="bootstrap login"><div class="modal fade login-modal" id="login-modal"><div class="login-modal-dialog modal-dialog"><div class="modal-content"><div class="modal-header"><button class="close close" data-dismiss="modal" type="button"><span aria-hidden="true">×</span><span class="sr-only">Close</span></button><h4 class="modal-title text-center"><strong>Log In</strong></h4></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><button class="btn btn-fb btn-lg btn-block btn-v-center-content" id="login-facebook-oauth-button"><svg style="float: left; width: 19px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="facebook-square" class="svg-inline--fa fa-facebook-square fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M400 32H48A48 48 0 0 0 0 80v352a48 48 0 0 0 48 48h137.25V327.69h-63V256h63v-54.64c0-62.15 37-96.48 93.67-96.48 27.14 0 55.52 4.84 55.52 4.84v61h-31.27c-30.81 0-40.42 19.12-40.42 38.73V256h68.78l-11 71.69h-57.78V480H400a48 48 0 0 0 48-48V80a48 48 0 0 0-48-48z"></path></svg><small><strong>Log in</strong> with <strong>Facebook</strong></small></button><br /><button class="btn btn-google btn-lg btn-block btn-v-center-content" id="login-google-oauth-button"><svg style="float: left; width: 22px; line-height: 1em; margin-right: .3em;" aria-hidden="true" focusable="false" data-prefix="fab" data-icon="google-plus" class="svg-inline--fa fa-google-plus fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M256,8C119.1,8,8,119.1,8,256S119.1,504,256,504,504,392.9,504,256,392.9,8,256,8ZM185.3,380a124,124,0,0,1,0-248c31.3,0,60.1,11,83,32.3l-33.6,32.6c-13.2-12.9-31.3-19.1-49.4-19.1-42.9,0-77.2,35.5-77.2,78.1S142.3,334,185.3,334c32.6,0,64.9-19.1,70.1-53.3H185.3V238.1H302.2a109.2,109.2,0,0,1,1.9,20.7c0,70.8-47.5,121.2-118.8,121.2ZM415.5,273.8v35.5H380V273.8H344.5V238.3H380V202.8h35.5v35.5h35.2v35.5Z"></path></svg><small><strong>Log in</strong> with <strong>Google</strong></small></button><br /><style type="text/css">.sign-in-with-apple-button { width: 100%; height: 52px; border-radius: 3px; border: 1px solid black; cursor: pointer; } .sign-in-with-apple-button > div { margin: 0 auto; / This centers the Apple-rendered button horizontally }</style><script src="https://appleid.cdn-apple.com/appleauth/static/jsapi/appleid/1/en_US/appleid.auth.js" type="text/javascript"></script><div class="sign-in-with-apple-button" data-border="false" data-color="white" id="appleid-signin"><span ="Sign Up with Apple" class="u-fs11"></span></div><script>AppleID.auth.init({ clientId: 'edu.academia.applesignon', scope: 'name email', redirectURI: 'https://www.academia.edu/sessions', state: "870b0b3b3c1c2c2e543cbf27b38fa38b33c858fcb7f8095a39d08b3b8b1129d2", });</script><script>// Hacky way of checking if on fast loswp if (window.loswp == null) { (function() { const Google = window?.Aedu?.Auth?.OauthButton?.Login?.Google; const Facebook = window?.Aedu?.Auth?.OauthButton?.Login?.Facebook; if (Google) { new Google({ el: '#login-google-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } if (Facebook) { new Facebook({ el: '#login-facebook-oauth-button', rememberMeCheckboxId: 'remember_me', track: null }); } })(); }</script></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><div class="hr-heading login-hr-heading"><span class="hr-heading-text">or</span></div></div></div></div><div class="modal-body"><div class="row"><div class="col-xs-10 col-xs-offset-1"><form class="js-login-form" action="https://www.academia.edu/sessions" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="biuEpjSe4O8-mfJHthSHq4Qb-QcYLCOVq6PqLIkmvU0CuxqBeH3KmE6mEZK4q8oifGroJOvhBZcxhlgyJDMD8Q" autocomplete="off" /><div class="form-group"><label class="control-label" for="login-modal-email-input" style="font-size: 14px;">Email</label><input class="form-control" id="login-modal-email-input" name="login" type="email" /></div><div class="form-group"><label class="control-label" for="login-modal-password-input" style="font-size: 14px;">Password</label><input class="form-control" id="login-modal-password-input" name="password" type="password" /></div><input type="hidden" name="post_login_redirect_url" id="post_login_redirect_url" value="https://independent.academia.edu/HarisDindo" autocomplete="off" /><div class="checkbox"><label><input type="checkbox" name="remember_me" id="remember_me" value="1" checked="checked" /><small style="font-size: 12px; margin-top: 2px; display: inline-block;">Remember me on this computer</small></label></div><br><input type="submit" name="commit" value="Log In" class="btn btn-primary btn-block btn-lg js-login-submit" data-disable-with="Log In" /></br></form><script>typeof window?.Aedu?.recaptchaManagedForm === 'function' && window.Aedu.recaptchaManagedForm( document.querySelector('.js-login-form'), document.querySelector('.js-login-submit') );</script><small style="font-size: 12px;"><br />or <a data-target="#login-modal-reset-password-container" data-toggle="collapse" href="javascript:void(0)">reset password</a></small><div class="collapse" id="login-modal-reset-password-container"><br /><div class="well margin-0x"><form class="js-password-reset-form" action="https://www.academia.edu/reset_password" accept-charset="UTF-8" method="post"><input type="hidden" name="authenticity_token" value="ww4o7ZGWfry8iHxI0cXVHfHoejwWjuSYPM10FODaDSCvnrbK3XVUy8y3n53fepiUCZlrH-VDwpqm6MYKTc-znA" autocomplete="off" /><p>Enter the email address you signed up with and we'll email you a reset link.</p><div class="form-group"><input class="form-control" name="email" type="email" /></div><script src="https://recaptcha.net/recaptcha/api.js" async defer></script> <script> var invisibleRecaptchaSubmit = function () { var closestForm = function (ele) { var curEle = ele.parentNode; while (curEle.nodeName !== 'FORM' && curEle.nodeName !== 'BODY'){ curEle = curEle.parentNode; } return curEle.nodeName === 'FORM' ? curEle : null }; var eles = document.getElementsByClassName('g-recaptcha'); if (eles.length > 0) { var form = closestForm(eles[0]); if (form) { form.submit(); } } }; </script> <input type="submit" data-sitekey="6Lf3KHUUAAAAACggoMpmGJdQDtiyrjVlvGJ6BbAj" data-callback="invisibleRecaptchaSubmit" class="g-recaptcha btn btn-primary btn-block" value="Email me a link" value=""/> </form></div></div><script> require.config({ waitSeconds: 90 })(["https://a.academia-assets.com/assets/collapse-45805421cf446ca5adf7aaa1935b08a3a8d1d9a6cc5d91a62a2a3a00b20b3e6a.js"], function() { // from javascript_helper.rb $("#login-modal-reset-password-container").on("shown.bs.collapse", function() { $(this).find("input[type=email]").focus(); }); }); </script> </div></div></div><div class="modal-footer"><div class="text-center"><small style="font-size: 12px;">Need an account? <a rel="nofollow" href="https://www.academia.edu/signup">Click here to sign up</a></small></div></div></div></div></div></div><script>// If we are on subdomain or non-bootstrapped page, redirect to login page instead of showing modal (function(){ if (typeof $ === 'undefined') return; var host = window.location.hostname; if ((host === $domain || host === "www."+$domain) && (typeof $().modal === 'function')) { $("#nav_log_in").click(function(e) { // Don't follow the link and open the modal e.preventDefault(); $("#login-modal").on('shown.bs.modal', function() { $(this).find("#login-modal-email-input").focus() }).modal('show'); }); } })()</script> <div class="bootstrap" id="footer"><div class="footer-content clearfix text-center padding-top-7x" style="width:100%;"><ul class="footer-links-secondary footer-links-wide list-inline margin-bottom-1x"><li><a href="https://www.academia.edu/about">About</a></li><li><a href="https://www.academia.edu/press">Press</a></li><li><a href="https://www.academia.edu/documents">Papers</a></li><li><a href="https://www.academia.edu/topics">Topics</a></li><li><a href="https://www.academia.edu/journals">Academia.edu Journals</a></li><li><a rel="nofollow" href="https://www.academia.edu/hiring"><svg style="width: 13px; height: 13px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="briefcase" class="svg-inline--fa fa-briefcase fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M320 336c0 8.84-7.16 16-16 16h-96c-8.84 0-16-7.16-16-16v-48H0v144c0 25.6 22.4 48 48 48h416c25.6 0 48-22.4 48-48V288H320v48zm144-208h-80V80c0-25.6-22.4-48-48-48H176c-25.6 0-48 22.4-48 48v48H48c-25.6 0-48 22.4-48 48v80h512v-80c0-25.6-22.4-48-48-48zm-144 0H192V96h128v32z"></path></svg> <strong>We're Hiring!</strong></a></li><li><a rel="nofollow" href="https://support.academia.edu/hc/en-us"><svg style="width: 12px; height: 12px;" aria-hidden="true" focusable="false" data-prefix="fas" data-icon="question-circle" class="svg-inline--fa fa-question-circle fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M504 256c0 136.997-111.043 248-248 248S8 392.997 8 256C8 119.083 119.043 8 256 8s248 111.083 248 248zM262.655 90c-54.497 0-89.255 22.957-116.549 63.758-3.536 5.286-2.353 12.415 2.715 16.258l34.699 26.31c5.205 3.947 12.621 3.008 16.665-2.122 17.864-22.658 30.113-35.797 57.303-35.797 20.429 0 45.698 13.148 45.698 32.958 0 14.976-12.363 22.667-32.534 33.976C247.128 238.528 216 254.941 216 296v4c0 6.627 5.373 12 12 12h56c6.627 0 12-5.373 12-12v-1.333c0-28.462 83.186-29.647 83.186-106.667 0-58.002-60.165-102-116.531-102zM256 338c-25.365 0-46 20.635-46 46 0 25.364 20.635 46 46 46s46-20.636 46-46c0-25.365-20.635-46-46-46z"></path></svg> <strong>Help Center</strong></a></li></ul><ul class="footer-links-tertiary list-inline margin-bottom-1x"><li class="small">Find new research papers in:</li><li class="small"><a href="https://www.academia.edu/Documents/in/Physics">Physics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Chemistry">Chemistry</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Biology">Biology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Health_Sciences">Health Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Ecology">Ecology</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Earth_Sciences">Earth Sciences</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Cognitive_Science">Cognitive Science</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Mathematics">Mathematics</a></li><li class="small"><a href="https://www.academia.edu/Documents/in/Computer_Science">Computer Science</a></li></ul></div></div><div class="DesignSystem" id="credit" style="width:100%;"><ul class="u-pl0x footer-links-legal list-inline"><li><a rel="nofollow" href="https://www.academia.edu/terms">Terms</a></li><li><a rel="nofollow" href="https://www.academia.edu/privacy">Privacy</a></li><li><a rel="nofollow" href="https://www.academia.edu/copyright">Copyright</a></li><li>Academia ©2025</li></ul></div><script> //<![CDATA[ window.detect_gmtoffset = true; window.Academia && window.Academia.set_gmtoffset && Academia.set_gmtoffset('/gmtoffset'); //]]> </script> <div id='overlay_background'></div> <div id='bootstrap-modal-container' class='bootstrap'></div> <div id='ds-modal-container' class='bootstrap DesignSystem'></div> <div id='full-screen-modal'></div> </div> </body> </html>