<!-- CINXE.COM -->
<!-- Robotics goes PRISMA | Robotica | Cambridge Core -->
<!DOCTYPE html>
<!--[if IE 8 ]> <html class="ie8"> <![endif]-->
<!--[if IE 9 ]> <html class="ie9"> <![endif]-->
<!--[if (gt IE 9)|!(IE)]><!--> <html class="no-js" lang="en"> <!--<![endif]-->
<head>
  <!-- meta tags & title for page component -->
  <!-- system header start -->
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Robotics goes PRISMA | Robotica | Cambridge Core</title>
  <script src="/core/vanilla/public/js/sentry.min.js"></script>
  <script>
    // Sentry error-reporting bootstrap.
    // Suppresses events fired while the page is being discarded, and routes
    // events whose stack frames carry module metadata to the DSN(s) recorded
    // in that metadata (multiplexed transport).

    // True once the page is genuinely terminating (pagehide without being
    // placed in the back/forward cache); checked in beforeSend below.
    let isTerminated = false;
    window.addEventListener('pagehide', (event) => {
      isTerminated = !event.persisted;
    }, { capture: true });

    // event.extra key under which per-event routing DSNs are stored.
    const EXTRA_KEY = "ROUTE_TO";

    Sentry.init({
      dsn: "https://074fed417c764caaafebad958d2c0c95@o1239501.ingest.sentry.io/6395238",
      release: "www.cambridge.org" + "@" + "unreadable",
      environment: "prod",
      ignoreErrors: [],
      integrations: [Sentry.moduleMetadataIntegration()],
      // Multiplexed transport: deliver each event to the DSNs listed under
      // event.extra[EXTRA_KEY] (populated in beforeSend); an empty array
      // falls back to the default DSN above.
      transport: Sentry.makeMultiplexedTransport(
        Sentry.makeFetchTransport,
        (args) => {
          const event = args.getEvent();
          if (
            event &&
            event.extra &&
            EXTRA_KEY in event.extra &&
            Array.isArray(event.extra[EXTRA_KEY])
          ) {
            return event.extra[EXTRA_KEY];
          }
          return [];
        },
      ),
      beforeSend: (event) => {
        // Drop events raised during page teardown or from non-http(s)
        // protocols (file:, about:, extensions) — never actionable.
        if (isTerminated || (window.location.protocol !== 'http:' && window.location.protocol !== 'https:')) {
          return null;
        }
        // FIX: original read `values?.[0].stacktrace` — with an empty
        // `values` array, `values[0]` is undefined and `.stacktrace`
        // throws a TypeError. Chain optionally through `[0]` as well.
        if (event?.exception?.values?.[0]?.stacktrace?.frames) {
          const { frames } = event.exception.values[0].stacktrace;
          // Route to the module metadata of the last (innermost) frame
          // that declares its own DSN.
          const routeTo = frames
            .filter((frame) => frame.module_metadata && frame.module_metadata.dsn)
            .map((v) => v.module_metadata)
            .slice(-1);
          if (routeTo.length) {
            event.extra = {
              ...event.extra,
              [EXTRA_KEY]: routeTo,
            };
          }
        }
        return event;
      },
    });
  </script>
  <!-- need to loop through block page defined header includes -->
  <!-- system header finish -->
  <meta property="og:site_name" content="Cambridge Core">
  <meta property="og:type" content="website">
  <meta property="og:url"
content="https://www.cambridge.org/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847"/> <meta property="og:title" content="Robotics goes PRISMA | Robotica | Cambridge Core"/> <meta property="og:description" content="Robotics goes PRISMA"/> <meta property="og:image" content="https://static.cambridge.org/covers/ROB_0_0_0/robotica.jpg?send-full-size-image=true"/> <meta name="description" content="Robotics goes PRISMA"> <link rel="canonical" href="https://www.cambridge.org/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847"> <meta name="dc.identifier" content="doi:10.1017/S026357472400033X"> <meta name="citation_journal_title" content="Robotica"> <meta name="citation_publisher" content="Cambridge University Press"> <meta name="citation_title" content="Robotics goes PRISMA"> <meta name="citation_author" content="Mario Selvaggio"> <meta name="citation_author_orcid" content="0000-0002-2460-1914"> <meta name="citation_author" content="Rocco Moccia"> <meta name="citation_author" content="Pierluigi Arpenti"> <meta name="citation_author" content="Riccardo Caccavale"> <meta name="citation_author" content="Fabio Ruggiero"> <meta name="citation_author" content="Jonathan Cacace"> <meta name="citation_author" content="Fanny Ficuciello"> <meta name="citation_author" content="Alberto Finzi"> <meta name="citation_author" content="Vincenzo Lippiello"> <meta name="citation_author" content="Luigi Villani"> <meta name="citation_author" content="Bruno Siciliano"> <meta name="citation_online_date" content="2024/03/20"> <meta name="citation_firstpage" content="1"> <meta name="citation_lastpage" content="28"> <meta name="citation_issn" content="0263-5747"> <meta name="citation_issn" content="1469-8668"> <meta name="citation_keywords" content="aerial robotics; control of robotic systems; legged robots; non-prehensile manipulation; surgical robots; teleoperation"> <meta name="citation_pdf_url" 
content="https://www.cambridge.org/core/services/aop-cambridge-core/content/view/5DA1E6B0701411F71E5FFC40F2E53847/S026357472400033Xa.pdf/div-class-title-robotics-goes-prisma-div.pdf"> <meta name="citation_reference" content="citation_title=A survey of robot manipulation in contact; citation_author=Suomalainen, M.; citation_author=Karayiannidis, Y.; citation_author=Kyrki, V.; citation_publication_date=2022; citation_journal_title=Robot Auton Syst; citation_volume=156; citation_firstpage=104224; citation_doi=10.1016/j.robot.2022.104224"> <meta name="citation_reference" content="citation_title=Dynamic Legged Manipulation of a Ball Through Multi-Contact Optimization; citation_author=Yang, C.; citation_author=Zhang, B.; citation_author=Zeng, J.; citation_author=Agrawal, A.; citation_author=Sreenath, K.; citation_publication_date=2020"> <meta name="citation_reference" content="citation_title=Control of nonprehensile planar rolling manipulation: A passivity-based approach; citation_author=Serra, D.; citation_author=Ruggiero, F.; citation_author=Donaire, A.; citation_author=Buonocore, L. 
R.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2019; citation_journal_title=IEEE Trans Robot; citation_volume=35; citation_firstpage=317; citation_lastpage=329; citation_doi=10.1109/TRO.2018.2887356"> <meta name="citation_reference" content="citation_title=Object rearrangement through planar pushing: A theoretical analysis and validation; citation_author=Chai, C.-Y.; citation_author=Peng, W.-H.; citation_author=Tsao, S.-L.; citation_publication_date=2022; citation_journal_title=IEEE T Robot; citation_volume=38; citation_firstpage=2703; citation_lastpage=2719; citation_doi=10.1109/TRO.2022.3153785"> <meta name="citation_reference" content="citation_title=Coordinate-Free Framework for Robotic Pizza Tossing and Catching; citation_author=Satici, A.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2016"> <meta name="citation_reference" content="citation_title=Nonprehensile dynamic manipulation: A survey; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2018; citation_journal_title=IEEE Robot Auto Lett; citation_volume=3; citation_firstpage=1711; citation_lastpage=1718; citation_doi=10.1109/LRA.2018.2801939"> <meta name="citation_reference" content="citation_title=Forces acting on a biped robot. 
center of pressure-zero moment point; citation_author=Sardain, P.; citation_author=Bessonnet, G.; citation_publication_date=2004; citation_journal_title=IEEE Trans Syst, Man, Cyber - Part A: Syst Humans; citation_volume=34; citation_firstpage=630; citation_lastpage=637; citation_doi=10.1109/TSMCA.2004.832811"> <meta name="citation_reference" content="citation_title=Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation; citation_author=Farid, Y.; citation_author=Siciliano, B.; citation_author=Ruggiero, F.; citation_publication_date=2022; citation_journal_title=Annu Rev Control; citation_volume=53; citation_firstpage=51; citation_lastpage=69; citation_doi=10.1016/j.arcontrol.2022.04.009"> <meta name="citation_reference" content="citation_title=Passive dynamic walking; citation_author=McGeer, T.; citation_publication_date=1990; citation_journal_title=Int J Robot Res; citation_volume=9; citation_firstpage=62; citation_lastpage=82; citation_doi=10.1177/027836499000900206"> <meta name="citation_reference" content="citation_title=Kinetic Energy Shaping for Gait Regulation of Underactuated Bipeds; citation_author=Holm, J.; citation_author=Spong, M.; citation_publication_date=2008"> <meta name="citation_reference" content="citation_title=Controlled Symmetries and Passive Walking; citation_author=Spong, M.; citation_author=Bullo, F.; citation_publication_date=2002; citation_inbook=Proceeding IFAC Triennal World Congress"> <meta name="citation_reference" content="citation_title=Passivity-Based Control of Bipedal Locomotion; citation_author=Spong, M.; citation_author=Holm, J.; citation_author=Lee, D.; citation_publication_date=2007; citation_inbook=IEEE Robotics & Automation Magazine; citation_firstpage=30; citation_lastpage=40"> <meta name="citation_reference" content="citation_title=Novel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model; citation_author=Mao, 
L.; citation_author=Gao, F.; citation_author=Tian, Y.; citation_author=Zhao, Y.; citation_publication_date=2020; citation_journal_title=Mech Mach Theory; citation_volume=151; citation_firstpage=103897; citation_doi=10.1016/j.mechmachtheory.2020.103897"> <meta name="citation_reference" content="citation_title=Contact Model Fusion for Event-Based Locomotion in Unstructured Terrains; citation_author=Bledt, G.; citation_author=Wensing, P. M.; citation_author=Ingersoll, S.; citation_author=Kim, S.; citation_publication_date=2018"> <meta name="citation_reference" content="citation_title=Passive whole-body control for quadruped robots: Experimental validation over challenging terrain; citation_author=Fahmi, S.; citation_author=Mastalli, C.; citation_author=Focchi, M.; citation_author=Semini, C.; citation_publication_date=2019; citation_journal_title=IEEE Robot Auto Lett; citation_volume=4; citation_firstpage=2553; citation_lastpage=2560; citation_doi=10.1109/LRA.2019.2908502"> <meta name="citation_reference" content="citation_title=Linear Time-Varying mpc for Nonprehensile Object Manipulation with a Nonholonomic Mobile Robot; citation_author=Bertoncelli, F.; citation_author=Ruggiero, F.; citation_author=Sabattini, L.; citation_publication_date=2020"> <meta name="citation_reference" content="citation_title=Task-Oriented Contact Optimization for Pushing Manipulation with Mobile Robots; citation_author=Bertoncelli, F.; citation_author=Selvaggio, M.; citation_author=Ruggiero, F.; citation_author=Sabattini, L.; citation_publication_date=2022"> <meta name="citation_reference" content="citation_title=Nonprehensile Object Transportation with a Legged Manipulator; citation_author=Morlando, V.; citation_author=Selvaggio, M.; citation_author=Ruggiero, F.; citation_publication_date=2022"> <meta name="citation_reference" content="citation_title=A shared-control teleoperation architecture for nonprehensile object transportation; citation_author=Selvaggio, M.; citation_author=Cacace, 
J.; citation_author=Pacchierotti, C.; citation_author=Ruggiero, F.; citation_author=Giordano, P. R.; citation_publication_date=2022; citation_journal_title=IEEE Trans Robot; citation_volume=38; citation_firstpage=569; citation_lastpage=583; citation_doi=10.1109/TRO.2021.3086773"> <meta name="citation_reference" content="citation_title=Non-prehensile object transportation via model predictive non-sliding manipulation control; citation_author=Selvaggio, M.; citation_author=Garg, A.; citation_author=Ruggiero, F.; citation_author=Oriolo, G.; citation_author=Siciliano, B.; citation_publication_date=2023; citation_journal_title=IEEE Trans Contr Syst T; citation_volume=31; citation_firstpage=2231; citation_lastpage=2244; citation_doi=10.1109/TCST.2023.3277224"> <meta name="citation_reference" content="citation_title=A non-prehensile object transportation framework with adaptive tilting based on quadratic programming; citation_author=Subburaman, R.; citation_author=Selvaggio, M.; citation_author=Ruggiero, F.; citation_publication_date=2023; citation_journal_title=IEEE Robot Auto Lett; citation_volume=8; citation_firstpage=3581; citation_lastpage=3588; citation_doi=10.1109/LRA.2023.3268594"> <meta name="citation_reference" content="citation_title=Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk; citation_author=Donaire, A.; citation_author=Ruggiero, F.; citation_author=Buonocore, L. R.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2017; citation_journal_title=IEEE Trans Contr Syst Tech; citation_volume=25; citation_firstpage=2135; citation_lastpage=2142; citation_doi=10.1109/TCST.2016.2637719"> <meta name="citation_reference" content="citation_title=A new laparoscopic tool with in-hand rolling capabilities for needle reorientation; citation_author=Fontanelli, G. A.; citation_author=Selvaggio, M.; citation_author=Buonocore, L. 
R.; citation_author=Ficuciello, F.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_publication_date=2018; citation_journal_title=IEEE Robot Autom Lett; citation_volume=3; citation_firstpage=2354; citation_lastpage=2361; citation_doi=10.1109/LRA.2018.2809443"> <meta name="citation_reference" content="citation_title=Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop; citation_author=Gutiérrez-Giles, A.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2018; citation_journal_title=IEEE Robot Autom Lett; citation_volume=3; citation_firstpage=1136; citation_lastpage=1143; citation_doi=10.1109/LRA.2018.2792403"> <meta name="citation_reference" content="citation_title=Hybrid visual servoing with hierarchical task composition for aerial manipulation; citation_author=Lippiello, V.; citation_author=Cacace, J.; citation_author=Santamaria-Navarro, A.; citation_author=Andrade-Cetto, J.; citation_author=Trujillo, M.; citation_author=Esteves, Y.; citation_author=Viguria, A.; citation_publication_date=2016; citation_journal_title=IEEE Robot Auto Lett; citation_volume=1; citation_firstpage=259; citation_lastpage=266; citation_doi=10.1109/LRA.2015.2510749"> <meta name="citation_reference" content="citation_title=Control of nonprehensile rolling manipulation: Balancing a disk on a disk; citation_author=Ryu, J.-C.; citation_author=Ruggiero, F.; citation_author=Lynch, K. 
M.; citation_publication_date=2013; citation_journal_title=IEEE Trans Robot; citation_volume=29; citation_firstpage=1152; citation_lastpage=1161; citation_doi=10.1109/TRO.2013.2262775"> <meta name="citation_reference" content="citation_title=On the Experiments about the Nonprehensile Reconfiguration of a Rolling Sphere on a Plate; citation_author=Serra, D.; citation_author=Ferguson, J.; citation_author=Ruggiero, F.; citation_author=Siniscalco, A.; citation_author=Petit, A.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2018"> <meta name="citation_reference" content="citation_title=Closed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism; citation_author=Gutiérrez-Giles, A.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=Planning Framework for Robotic Pizza Dough Stretching with a Rolling Pin; citation_author=Kim, J.-T.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_author=Siciliano, B.; citation_author=Ruggiero, F.; citation_publication_date=2022; citation_inbook=Robot Dynamic Manipulation. Perception of Deformable Objects and Nonprehensile Manipulation Control; citation_firstpage=229; citation_lastpage=253"> <meta name="citation_reference" content="citation_title=Nonprehensile Manipulation Control and Task Planning for Deformable Object Manipulation: Results From the RoDyMan Project; citation_author=Ruggiero, F.; citation_author=Kim, J.-T.; citation_author=Gutiérrez-Giles, A.; citation_author=Satici, A.; citation_author=Donaire, A.; citation_author=Cacace, J.; citation_author=Buonocore, L. R.; citation_author=Fontanelli, G. 
A.; citation_author=Lippiello, V.; citation_author=Siciliano, B.; citation_author=Gusikhin, O.; citation_author=Madani, K.; citation_publication_date=2020; citation_inbook=Informatics in Control, Automation and Robotics, Lecture Notes in Electrical Engineering; citation_firstpage=76; citation_lastpage=100"> <meta name="citation_reference" content="citation_title=Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project; citation_author=Ruggiero, F.; citation_author=Petit, A.; citation_author=Serra, D.; citation_author=Satici, A. C.; citation_author=Cacace, J.; citation_author=Donaire, A.; citation_author=Ficuciello, F.; citation_author=Buonocore, L. R.; citation_author=Fontanelli, G. A.; citation_author=Lippiello, V.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_publication_date=2018; citation_journal_title=IEEE Robot Autom Mag; citation_volume=25; citation_firstpage=83; citation_lastpage=92; citation_doi=10.1109/MRA.2017.2781306"> <meta name="citation_reference" content="citation_title=Energy Pumping-and-Damping for Gait Robustification of Underactuated Planar Biped Robots Within the Hybrid Zero Dynamics Framework; citation_author=Arpenti, P.; citation_author=Donaire, A.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2020"> <meta name="citation_reference" content="citation_title=Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots; citation_author=Arpenti, P.; citation_author=Donaire, A.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2022"> <meta name="citation_reference" content="citation_title=Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots; citation_author=Arpenti, P.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2020"> <meta 
name="citation_reference" content="citation_title=A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs; citation_author=Arpenti, P.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2022; citation_journal_title=Int J Control, Auto Syst; citation_volume=20; citation_firstpage=283; citation_lastpage=297; citation_doi=10.1007/s12555-020-0839-1"> <meta name="citation_reference" content="citation_title=Gait Generation for Underactuated Compass-Like Robots Using Dissipative Forces in the Controller; citation_author=Nacusse, M.; citation_author=Arpenti, P.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2020; citation_inbook=IFAC-PapersOnLine; citation_firstpage=9023; citation_lastpage=9030"> <meta name="citation_reference" content="citation_title=Tethering a Human with a Quadruped Robot: A Guide Dog to Help Visually Impaired People; citation_author=Morlando, V.; citation_author=Lippiello, V.; citation_author=Ruggiero, F.; citation_publication_date=2023"> <meta name="citation_reference" content="citation_title=Disturbance Rejection for Legged Robots Through a Hybrid Observer; citation_author=Morlando, V.; citation_author=Ruggiero, F.; citation_publication_date=2022"> <meta name="citation_reference" content="citation_title=Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots; citation_author=Morlando, V.; citation_author=Teimoorzadeh, A.; citation_author=Ruggiero, F.; citation_publication_date=2021; citation_journal_title=Mech Mach Theory; citation_volume=164; citation_firstpage=104412; citation_doi=10.1016/j.mechmachtheory.2021.104412"> <meta name="citation_reference" content="citation_title=The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation; citation_author=Lippiello, V.; citation_author=Ruggiero, F.; 
citation_author=Siciliano, B.; citation_publication_date=2016; citation_journal_title=IEEE Robot Autom Lett; citation_volume=1; citation_firstpage=492; citation_lastpage=499; citation_doi=10.1109/LRA.2016.2519147"> <meta name="citation_reference" content="citation_title=Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator; citation_author=Heins, A.; citation_author=Schoellig, A. P.; citation_publication_date=2023; citation_journal_title=IEEE Robot Auto Lett; citation_volume=8; citation_firstpage=7986; citation_lastpage=7993; citation_doi=10.1109/LRA.2023.3324520"> <meta name="citation_reference" content="citation_title=A Solution to Slosh-Free Robot Trajectory Optimization; citation_author=Muchacho, R. I. C.; citation_author=Laha, R.; citation_author=Figueredo, L. F.; citation_author=Haddadin, S.; citation_publication_date=2022"> <meta name="citation_reference" content="citation_title=Shared Autonomy Control for Slosh-Free Teleoperation; citation_author=Muchacho, R. I. C.; citation_author=Bien, S.; citation_author=Laha, R.; citation_author=Naceri, A.; citation_author=Figueredo, L. F.; citation_author=Haddadin, S.; citation_publication_date=2023"> <meta name="citation_reference" content="citation_title=Push-manipulation of complex passive mobile objects using experimentally acquired motion models; citation_author=Meriçli, T.; citation_author=Veloso, M.; citation_author=Akın, H. L.; citation_publication_date=2015; citation_journal_title=Auton Robot; citation_volume=38; citation_firstpage=317; citation_lastpage=329; citation_doi=10.1007/s10514-014-9414-z"> <meta name="citation_reference" content="citation_title=A model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics; citation_author=Novin, R. 
S.; citation_author=Yazdani, A.; citation_author=Merryweather, A.; citation_author=Hermans, T.; citation_publication_date=2021; citation_journal_title=Int J Robot Res; citation_volume=40; citation_firstpage=815; citation_lastpage=831; citation_doi=10.1177/0278364921992793"> <meta name="citation_reference" content="citation_title=Robust trajectory tracking for a scale model autonomous helicopter; citation_author=Mahony, R.; citation_author=Hamel, T.; citation_publication_date=2004; citation_journal_title=Int J Robust Nonlin; citation_volume=14; citation_firstpage=1035; citation_lastpage=1059; citation_doi=10.1002/rnc.931"> <meta name="citation_reference" content="citation_title=Autonomous Flying Robots; citation_author=Nonami, K.; citation_author=Kendoul, F.; citation_author=Suzuki, S.; citation_author=Wang, W.; citation_publication_date=2010; citation_inbook=Unmanned Aerial Vehicles and Micro Aerial Vehicles"> <meta name="citation_reference" content="citation_title=Aerial Grasping of a Moving Target with a Quadrotor UAV; citation_author=Spica, R.; citation_author=Franchi, A.; citation_author=Oriolo, G.; citation_author=Bülthoff, H.; citation_author=Giordano, P. R.; citation_publication_date=2012"> <meta name="citation_reference" content="citation_title=Experimental Validation of a New Adaptive Control Scheme for Quadrotors MAVs; citation_author=Antonelli, G.; citation_author=Cataldi, E.; citation_author=Giordano, P. 
R.; citation_author=Chiaverini, S.; citation_author=Franchi, A.; citation_publication_date=2013"> <meta name="citation_reference" content="citation_title=Adaptive control of quadrotor UAVs: A design trade study with flight evaluations; citation_author=Dydek, Z.; citation_author=Annaswamy, A.; citation_author=Lavretsky, E.; citation_publication_date=2013; citation_journal_title=IEEE Trans Contr Syst Tech; citation_volume=21; citation_firstpage=1400; citation_lastpage=1406; citation_doi=10.1109/TCST.2012.2200104"> <meta name="citation_reference" content="citation_title=Adaptive position tracking of VTOL UAVs; citation_author=Roberts, A.; citation_author=Tayebi, A.; citation_publication_date=2011; citation_journal_title=IEEE Trans Robot; citation_volume=27; citation_firstpage=129; citation_lastpage=142; citation_doi=10.1109/TRO.2010.2092870"> <meta name="citation_reference" content="citation_title=A Nonlinear Force Observer for Quadrotors and Application to Physical Interactive Tasks; citation_author=Yüksel, B.; citation_author=Secchi, C.; citation_author=Bülthoff, H.; citation_author=Franchi, A.; citation_publication_date=2014"> <meta name="citation_reference" content="citation_title=Passivity-based adaptive attitude control of a rigid spacecraft; citation_author=Egeland, O.; citation_author=Godhavn, J.-M.; citation_publication_date=1994; citation_journal_title=IEEE Trans Automat Contr; citation_volume=39; citation_firstpage=842; citation_lastpage=846; citation_doi=10.1109/9.286266"> <meta name="citation_reference" content="citation_title=Passivity-based adaptive backstepping control of quadrotor-type UAVs; citation_author=Ha, C.; citation_author=Zuo, Z.; citation_author=Choi, F.; citation_author=Lee, D.; citation_publication_date=2014; citation_journal_title=Robot Auton Syst; citation_volume=62; citation_firstpage=1305; citation_lastpage=1315; citation_doi=10.1016/j.robot.2014.03.019"> <meta name="citation_reference" content="citation_title=Advances in Unmanned 
Aerial Vehicles: State of the Art and the Road to Autonomy volume 33 of Intelligent Systems, Control and Automation: Science and Engineering; citation_author=Valvanis, K.; citation_publication_date=2007"> <meta name="citation_reference" content="citation_title=Handbook of Unmanned Aerial Vehicles; citation_author=Valvanis, K.; citation_author=Vachtsevanos, G.; citation_publication_date=2015"> <meta name="citation_reference" content="citation_title=Past, present, and future of aerial robotic manipulators; citation_author=Oller, A.; citation_author=Tognon, M.; citation_author=Suarez, A.; citation_author=Lee, D.; citation_author=Franchi, A.; citation_publication_date=2022; citation_journal_title=IEEE Trans Robot; citation_volume=38; citation_firstpage=626; citation_lastpage=645; citation_doi=10.1109/TRO.2021.3084395"> <meta name="citation_reference" content="citation_title=Aerial manipulation: A literature review; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_author=Ollero, A.; citation_publication_date=2018; citation_journal_title=IEEE Robot Auto Lett; citation_volume=3; citation_firstpage=1957; citation_lastpage=1964; citation_doi=10.1109/LRA.2018.2808541"> <meta name="citation_reference" content="citation_title=Passivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics; citation_author=Ruggiero, F.; citation_author=Cacace, J.; citation_author=Sadeghian, H.; citation_author=Lippiello, V.; citation_publication_date=2015; citation_journal_title=Robot Auton Syst; citation_volume=72; citation_firstpage=139; citation_lastpage=151; citation_doi=10.1016/j.robot.2015.05.006"> <meta name="citation_reference" content="citation_title=Active disturbance rejection control for the robust flight of a passively tilted hexarotor; citation_author=Sotos, S. M. 
O.; citation_author=Cacace, J.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2022; citation_journal_title=Drones; citation_volume=6; citation_firstpage=250"> <meta name="citation_reference" content="citation_title=Globally attractive hyperbolic control for the robust flight of an actively tilting quadrotor; citation_author=Sotos, S. M. O.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2022; citation_journal_title=Drones; citation_volume=6; citation_firstpage=373"> <meta name="citation_reference" content="citation_title=Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach; citation_author=Lippiello, V.; citation_author=Ruggiero, F.; citation_author=Serra, D.; citation_publication_date=2014"> <meta name="citation_reference" content="citation_title=Emergency Landing for a Quadrotor in Case of a Propeller Failure: A PID Based Approach; citation_author=Lippiello, V.; citation_author=Ruggiero, F.; citation_author=Serra, D.; citation_publication_date=2014"> <meta name="citation_reference" content="citation_title=Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems; citation_author=D’Ago, G.; citation_author=Selvaggio, M.; citation_author=Suarez, A.; citation_author=Gañán, F. J.; citation_author=Buonocore, L. 
R.; citation_author=Di Castro, M.; citation_author=Lippiello, V.; citation_author=Ollero, A.; citation_author=Ruggiero, F.; citation_publication_date=2024; citation_journal_title=Robot Auton Syst; citation_volume=175; citation_firstpage=104643; citation_doi=10.1016/j.robot.2024.104643"> <meta name="citation_reference" content="citation_title=A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm; citation_author=Ruggiero, F.; citation_author=Trujillo, M.; citation_author=Cano, R.; citation_author=Ascorbe, H.; citation_author=Viguria, A.; citation_author=Peréz, C.; citation_author=Lippiello, V.; citation_author=Ollero, A.; citation_author=Siciliano, B.; citation_publication_date=2015"> <meta name="citation_reference" content="citation_title=A Novel Hybrid Aerial-Ground Manipulator for Pipeline Inspection Tasks; citation_author=Cacace, J.; citation_author=Fontanelli, G. A.; citation_author=Lippiello, V.; citation_publication_date=2021; citation_inbook=Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO); citation_firstpage=1; citation_lastpage=6"> <meta name="citation_reference" content="citation_title=A novel articulated rover for industrial pipes inspection tasks; citation_author=Cacace, J.; citation_author=Silva, M. D.; citation_author=Fontanelli, G. A.; citation_author=Lippiello, V.; citation_publication_date=2021"> <meta name="citation_reference" content="citation_title=A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation; citation_author=Cuniato, E.; citation_author=Cacace, J.; citation_author=Selvaggio, M.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2021"> <meta name="citation_reference" content="citation_title=Image-based visual-impedance control of a dual-arm aerial manipulator; citation_author=Lippiello, V.; citation_author=Fontanelli, G. 
A.; citation_author=Ruggiero, F.; citation_publication_date=2018; citation_journal_title=IEEE Robot Auto Lett; citation_volume=3; citation_firstpage=1856; citation_lastpage=1863; citation_doi=10.1109/LRA.2018.2806091"> <meta name="citation_reference" content="citation_title=Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines; citation_author=D’Angelo, S.; citation_author=Pagano, F.; citation_author=Ruggiero, F.; citation_author=Lippiello, V.; citation_publication_date=2023"> <meta name="citation_reference" content="citation_title=Autonomy in physical human-robot interaction: A brief survey; citation_author=Selvaggio, M.; citation_author=Cognetti, M.; citation_author=Nikolaidis, S.; citation_author=Ivaldi, S.; citation_author=Siciliano, B.; citation_publication_date=2021; citation_journal_title=IEEE Robot Autom Lett; citation_volume=6; citation_firstpage=7989; citation_lastpage=7996; citation_doi=10.1109/LRA.2021.3100603"> <meta name="citation_reference" content="citation_title=A hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes; citation_author=Johannsmeier, L.; citation_author=Haddadin, S.; citation_publication_date=2017; citation_journal_title=IEEE Robot Autom Lett; citation_volume=2; citation_firstpage=41; citation_lastpage=48; citation_doi=10.1109/LRA.2016.2535907"> <meta name="citation_reference" content="citation_title=Combining human guidance and structured task execution during physical human–robot collaboration; citation_author=Cacace, J.; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_author=Grieco, R.; citation_publication_date=2022; citation_journal_title=J Intell Manuf; citation_volume=34; citation_firstpage=3053; citation_lastpage=3067; citation_doi=10.1007/s10845-022-01989-y"> <meta name="citation_reference" content="citation_title=Interactive plan execution during human-robot cooperative manipulation; 
citation_author=Cacace, J.; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_author=Lippiello, V.; citation_publication_date=2018; citation_journal_title=IFAC-PapersOnLine; citation_volume=51; citation_firstpage=500; citation_lastpage=505; citation_doi=10.1016/j.ifacol.2018.11.584"> <meta name="citation_reference" content="citation_title=Human-robot interaction: A survey; citation_author=Goodrich, M. A.; citation_author=Schultz, A. C.; citation_publication_date=2008; citation_journal_title=Found Trends® Human-Comp Inter; citation_volume=1; citation_firstpage=203; citation_lastpage=275; citation_doi=10.1561/1100000005"> <meta name="citation_reference" content="citation_title=Medical robotics 2014;regulatory, ethical, and legal considerations for increasing levels of autonomy; citation_author=Yang, G.-Z.; citation_author=Cambias, J.; citation_author=Cleary, K.; citation_author=Daimler, E.; citation_author=Drake, J.; citation_author=Dupont, P. E.; citation_author=Hata, N.; citation_author=Kazanzides, P.; citation_author=Martel, S.; citation_author=Patel, R. V.; citation_author=Santos, V. J.; citation_author=Taylor, R. 
H.; citation_publication_date=2017; citation_journal_title=Sci Robot; citation_volume=2; citation_firstpage=eaam8638; citation_doi=10.1126/scirobotics.aam8638"> <meta name="citation_reference" content="citation_title=Human-Robot Interaction in Social Robotics; citation_author=Kanda, T.; citation_author=Ishiguro, H.; citation_publication_date=2017"> <meta name="citation_reference" content="citation_title=Shared autonomy–learning of joint action and human-robot collaboration; citation_author=Schilling, M.; citation_author=Burgard, W.; citation_author=Muelling, K.; citation_author=Wrede, B.; citation_author=Ritter, H.; citation_publication_date=2019; citation_journal_title=Front Neurorobotics; citation_volume=13; citation_firstpage=16; citation_doi=10.3389/fnbot.2019.00016"> <meta name="citation_reference" content="citation_title=Dynamic-Autonomy for Urban Search and Rescue; citation_author=Bruemmer, D. J.; citation_author=Dudenhoeffer, D. D.; citation_author=Marble, J. L.; citation_publication_date=2002; citation_inbook=AAAI Mobile Robot Competition; citation_firstpage=33; citation_lastpage=37"> <meta name="citation_reference" content="citation_title=Sliding Autonomy for Peer-to-Peer Human-Robot Teams; citation_author=Dias, M. B.; citation_author=Kannan, B.; citation_author=Browning, B.; citation_author=Jones, E.; citation_author=Argall, B.; citation_author=Dias, M. F.; citation_author=Zinck, M.; citation_author=Veloso, M.; citation_author=Stentz, A.; citation_publication_date=2008"> <meta name="citation_reference" content="citation_title=Adjustable Control Autonomy for Manned Space Flight; citation_author=Kortenkamp, D.; citation_author=Keirn-Schreckenghost, D.; citation_author=Bonasso, R. 
P.; citation_publication_date=2000"> <meta name="citation_reference" content="citation_title=Semi-Autonomous Stability Control and Hazard Avoidance for Manned and Unmanned Ground Vehicles; citation_author=Anderson, S.; citation_author=Peters, S.; citation_author=Iagnemma, K.; citation_author=Overholt, J.; citation_publication_date=2010"> <meta name="citation_reference" content="citation_title=Blending Human and Robot Inputs for Sliding Scale Autonomy; citation_author=Desai, M.; citation_author=Yanco, H. A.; citation_publication_date=2005; citation_inbook=IEEE International Workshop on Robot and Human Interactive Communication (ROMAN); citation_firstpage=537; citation_lastpage=542"> <meta name="citation_reference" content="citation_title=Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation; citation_author=Pitzer, B.; citation_author=Styer, M.; citation_author=Bersch, C.; citation_author=DuHadway, C.; citation_author=Becker, J.; citation_publication_date=2011"> <meta name="citation_reference" content="citation_title=User Modelling for Principled Sliding Autonomy in Human-Robot Teams; citation_author=Sellner, B.; citation_author=Simmons, R.; citation_author=Singh, S.; citation_publication_date=2005; citation_inbook=Multi-Robot Systems. From Swarms to Intelligent Automata; citation_firstpage=197; citation_lastpage=208"> <meta name="citation_reference" content="citation_title=A Policy-Blending Formalism for Shared Control; citation_author=Dragan, A. D.; citation_author=Srinivasa, S. S.; citation_publication_date=2013; citation_journal_title=Int J Robot Res; citation_volume=32; citation_firstpage=790; citation_lastpage=805; citation_doi=10.1177/0278364913490324"> <meta name="citation_reference" content="citation_title=Shared Autonomy Via Hindsight Optimization; citation_author=Javdani, S.; citation_author=Srinivasa, S. S.; citation_author=Bagnell, J. 
A.; citation_publication_date=2015; citation_inbook=Robotics Science and Systems"> <meta name="citation_reference" content="citation_title=Adaptive virtual fixtures for machine-assisted teleoperation tasks; citation_author=Aarno, D.; citation_author=Ekvall, S.; citation_author=Kragic, D.; citation_publication_date=2005"> <meta name="citation_reference" content="citation_title=Characterizing Efficiency of Human Robot Interaction: A Case Study of Shared-Control teleoperation; citation_author=Crandall, J. W.; citation_author=Goodrich, M. A.; citation_publication_date=2002"> <meta name="citation_reference" content="citation_title=Haptic-based shared-control methods for a dual-arm system; citation_author=Selvaggio, M.; citation_author=Abi-Farraj, F.; citation_author=Pacchierotti, C.; citation_author=Giordano, P. R.; citation_author=Siciliano, B.; citation_publication_date=2018; citation_journal_title=IEEE Robot Auto Lett; citation_volume=3; citation_firstpage=4249; citation_lastpage=4256; citation_doi=10.1109/LRA.2018.2864353"> <meta name="citation_reference" content="citation_title=Passive task-prioritized shared-control teleoperation with haptic guidance; citation_author=Selvaggio, M.; citation_author=Giordano, P. R.; citation_author=Ficuciello, F.; citation_author=Siciliano, B.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=Passive virtual fixtures adaptation in minimally invasive robotic surgery; citation_author=Selvaggio, M.; citation_author=Fontanelli, G. 
A.; citation_author=Ficuciello, L.; citation_author=Villani, F.; citation_author=Siciliano, B.; citation_publication_date=2018; citation_journal_title=IEEE Robot Auto Lett; citation_volume=3; citation_firstpage=3129; citation_lastpage=3136; citation_doi=10.1109/LRA.2018.2849876"> <meta name="citation_reference" content="citation_title=Enhancing bilateral teleoperation using camera-based online virtual fixtures generation; citation_author=Selvaggio, M.; citation_author=Notomista, G.; citation_author=Chen, F.; citation_author=Gao, B.; citation_author=Trapani, F.; citation_author=Caldwell, D.; citation_publication_date=2016"> <meta name="citation_reference" content="citation_title=Haptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery; citation_author=Selvaggio, M.; citation_author=A.Ghalamzan, E.; citation_author=Moccia, R.; citation_author=Ficuciello, F.; citation_author=Siciliano, B.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=Human-robot interaction review: Challenges and solutions for modern industrial environments; citation_author=Rodriguez-Guerra, D.; citation_author=Sorrosal, G.; citation_author=Cabanes, I.; citation_author=Calleja, C.; citation_publication_date=2021; citation_journal_title=IEEE Access; citation_volume=9; citation_firstpage=108557; citation_lastpage=108578; citation_doi=10.1109/ACCESS.2021.3099287"> <meta name="citation_reference" content="citation_title=Everyday activities; citation_author=Schultheis, H.; citation_author=Cooper, R. P.; citation_publication_date=2022"> <meta name="citation_reference" content="citation_title=Know rob 2.0–a 2nd Generation Knowledge Processing Framework for Cognition-Enabled Robotic Agents; citation_author=Beetz, M.; citation_author=Beßler, D.; citation_author=Haidu, A.; citation_author=Pomarlan, M.; citation_author=Bozcuoğlu, A. 
K.; citation_author=Bartels, G.; citation_publication_date=2018"> <meta name="citation_reference" content="citation_title=Artificial cognition for social human–robot interaction: An implementation; citation_author=Lemaignan, S.; citation_author=Warnier, M.; citation_author=Sisbot, E. A.; citation_author=Clodic, A.; citation_author=Alami, R.; citation_publication_date=2017; citation_journal_title=Artif Intell; citation_volume=247; citation_firstpage=45; citation_lastpage=69; citation_doi=10.1016/j.artint.2016.07.002"> <meta name="citation_reference" content="citation_title=A Formal Model of Affordances for Flexible Robotic Task Execution; citation_author=Beßler, D.; citation_author=Porzel, R.; citation_author=Pomarlan, M.; citation_author=Beetz, M.; citation_author=Malaka, R.; citation_author=Bateman, J.; citation_publication_date=2020; citation_inbook=ECAI; citation_firstpage=2425; citation_lastpage=2432"> <meta name="citation_reference" content="citation_title=Reconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features; citation_author=de la Cruz, P.; citation_author=Piater, J.; citation_author=Saveriano, M.; citation_publication_date=2020"> <meta name="citation_reference" content="citation_title=Model-based control architecture for attentive robots in rescue scenarios; citation_author=Carbone, A.; citation_author=Finzi, A.; citation_author=Orlandini, A.; citation_author=Pirri, F.; citation_publication_date=2008; citation_journal_title=Auton Robot; citation_volume=24; citation_firstpage=87; citation_lastpage=120; citation_doi=10.1007/s10514-007-9055-6"> <meta name="citation_reference" content="citation_title=Robust Execution of Plans for Human-Robot Teams; citation_author=Karpas, E.; citation_author=Levine, S. J.; citation_author=Yu, P.; citation_author=Williams, B. 
C.; citation_publication_date=2015; citation_inbook=ICAPS-2015; citation_firstpage=342; citation_lastpage=346"> <meta name="citation_reference" content="citation_title=Conflict monitoring and cognitive control; citation_author=Botvinick, M. M.; citation_author=Braver, T. S.; citation_author=Barch, D. M.; citation_author=Carter, C. S.; citation_author=Cohen, J. D.; citation_publication_date=2001; citation_journal_title=Psychol Rev; citation_volume=108; citation_firstpage=624; citation_lastpage=652; citation_doi=10.1037/0033-295X.108.3.624"> <meta name="citation_reference" content="citation_title=Contention scheduling and the control of routine activities; citation_author=Cooper, R.; citation_author=Shallice, T.; citation_publication_date=2000; citation_journal_title=Cogn Neuropsychol; citation_volume=17; citation_firstpage=297; citation_lastpage=338; citation_doi=10.1080/026432900380427"> <meta name="citation_reference" content="citation_title=Hierarchical schemas and goals in the control of sequential behavior; citation_author=Cooper, R.; citation_author=Shallice, T.; citation_publication_date=2006; citation_journal_title=Psychol Rev; citation_volume=113; citation_firstpage=887; citation_lastpage=916; citation_doi=10.1037/0033-295X.113.4.887"> <meta name="citation_reference" content="citation_title=Learning Object Manipulation Skills via Approximate State Estimation from Real Videos; citation_author=Petrík, V.; citation_author=Tapaswi, M.; citation_author=Laptev, I.; citation_author=Sivic, J.; citation_publication_date=2021"> <meta name="citation_reference" content="citation_title=A Survey on Semantic-Based Methods for the Understanding of Human Movements; citation_author=Ramirez-Amaro, K.; citation_author=Yang, Y.; citation_author=Cheng, G.; citation_publication_date=2019; citation_journal_title=Robot Auton Syst; citation_volume=119; citation_firstpage=31; citation_lastpage=50; citation_doi=10.1016/j.robot.2019.05.013"> <meta name="citation_reference" 
content="citation_title=Combining task and motion planning: Challenges and guidelines; citation_author=Mansouri, M.; citation_author=Pecora, F.; citation_author=Schüller, P.; citation_publication_date=2021; citation_journal_title=Front Robot AI; citation_volume=8; citation_firstpage=637888; citation_doi=10.3389/frobt.2021.637888"> <meta name="citation_reference" content="citation_title=Attentional Multimodal Interface for Multidrone Search in the Alps; citation_author=Cacace, J.; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_author=Lippiello, V.; citation_publication_date=2016"> <meta name="citation_reference" content="citation_title=Attentional Supervision of Human-Robot Collaborative Plans; citation_author=Caccavale, R.; citation_author=Cacace, J.; citation_author=Fiore, M.; citation_author=Alami, R.; citation_author=Finzi, A.; citation_publication_date=2016"> <meta name="citation_reference" content="citation_title=Plan Execution and Attentional Regulations for Flexible Human-Robot Interaction; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_publication_date=2015"> <meta name="citation_reference" content="citation_title=Flexible task execution and attentional regulations in human-robot interaction; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_publication_date=2016; citation_journal_title=IEEE Trans Cogn Develp Syst; citation_volume=9; citation_firstpage=68; citation_lastpage=79; citation_doi=10.1109/TCDS.2016.2614690"> <meta name="citation_reference" content="citation_title=Toward a Cognitive Control Framework for Explainable Robotics; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_publication_date=2021; citation_inbook=Human-Friendly Robotics 2020: 13th International Workshop; citation_firstpage=46; citation_lastpage=58"> <meta name="citation_reference" content="citation_title=A robotic cognitive control framework for collaborative task execution and learning; 
citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_publication_date=2022; citation_journal_title=Top Cogn Sci; citation_volume=14; citation_firstpage=327; citation_lastpage=343; citation_doi=10.1111/tops.12587"> <meta name="citation_reference" content="citation_title=Attentional Regulations in a Situated Human-Robot Dialogue; citation_author=Caccavale, R.; citation_author=Leone, E.; citation_author=Lucignano, L.; citation_author=Rossi, S.; citation_author=Staffa, M.; citation_author=Finzi, A.; citation_publication_date=2014"> <meta name="citation_reference" content="citation_title=Learning attentional regulations for structured tasks execution in robotic cognitive control; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_publication_date=2019; citation_journal_title=Auton Robot; citation_volume=43; citation_firstpage=2229; citation_lastpage=2243; citation_doi=10.1007/s10514-019-09876-x"> <meta name="citation_reference" content="citation_title=Kinesthetic teaching and attentional supervision of structured tasks in human–robot interaction; citation_author=Caccavale, R.; citation_author=Saveriano, M.; citation_author=Finzi, A.; citation_author=Lee, D.; citation_publication_date=2019; citation_journal_title=Auton Robot; citation_volume=43; citation_firstpage=1291; citation_lastpage=1307; citation_doi=10.1007/s10514-018-9706-9"> <meta name="citation_reference" content="citation_title=Imitation Learning and Attentional Supervision of Dual-Arm Structured Tasks; citation_author=Caccavale, R.; citation_author=Saveriano, M.; citation_author=Fontanelli, G. 
A.; citation_author=Ficuciello, F.; citation_author=Lee, D.; citation_author=Finzi, A.; citation_publication_date=2017"> <meta name="citation_reference" content="citation_title=A multi-robot deep Q-learning framework for priority-based sanitization of railway stations; citation_author=Caccavale, R.; citation_author=Ermini, M.; citation_author=Fedeli, E.; citation_author=Finzi, A.; citation_author=Lippiello, V.; citation_author=Tavano, F.; citation_publication_date=2023; citation_journal_title=Appl Intell; citation_volume=53; citation_firstpage=20595; citation_lastpage=20613; citation_doi=10.1007/s10489-023-04529-0"> <meta name="citation_reference" content="citation_title=Toward a Heterogeneous Multi-Robot Framework for Priority-Based Sanitization of Railway Stations; citation_author=Caccavale, R.; citation_author=Ermini, M.; citation_author=Fedeli, E.; citation_author=Finzi, A.; citation_author=Lippiello, V.; citation_author=Tavano, F.; citation_publication_date=2022; citation_inbook=AIxIA 2022–Advances in Artificial Intelligence: XXIst International Conference of the Italian Association for Artificial Intelligence, AIxIA 2022; citation_firstpage=387; citation_lastpage=401"> <meta name="citation_reference" content="citation_title=A rapidly-exploring random trees approach to combined task and motion planning; citation_author=Caccavale, R.; citation_author=Finzi, A.; citation_publication_date=2022; citation_journal_title=Robot Auton Syst; citation_volume=157; citation_firstpage=104238; citation_doi=10.1016/j.robot.2022.104238"> <meta name="citation_reference" content="citation_title=Attention to action: Willed and automatic control of behavior; citation_author=Norman, D. 
A.; citation_author=Shallice, T.; citation_publication_date=1986"> <meta name="citation_reference" content="citation_title=Robotics-Logistics: Challenges for Automation of Logistic Processes; citation_author=Echelmeyer, W.; citation_author=Kirchheim, A.; citation_author=Wellbrock, E.; citation_publication_date=2008"> <meta name="citation_reference" content="citation_title=Real-time planning robotic palletizing tasks using reusable roadmaps; citation_author=Sakamoto, T.; citation_author=Harada, K.; citation_author=Wan, W.; citation_publication_date=2020; citation_journal_title=J Robot, Network Art Life; citation_volume=6; citation_firstpage=240; citation_lastpage=245; citation_doi=10.2991/jrnal.k.200222.009"> <meta name="citation_reference" content="citation_title=Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot; citation_author=Jocas, M.; citation_author=Kurrek, P.; citation_author=Zoghlami, F.; citation_author=Gianni, M.; citation_author=Salehi, V.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly; citation_author=Nakamoto, H.; citation_author=Eto, H.; citation_author=Sonoura, T.; citation_author=Tanaka, J.; citation_author=Ogawa, A.; citation_publication_date=2016"> <meta name="citation_reference" content="citation_title=Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter; citation_author=Schwarz, M.; citation_author=Milan, A.; citation_author=Periyasamy, A. 
S.; citation_author=Behnke, S.; citation_publication_date=2018; citation_journal_title=Int J Robot Res; citation_volume=37; citation_firstpage=437; citation_lastpage=451; citation_doi=10.1177/0278364917713117"> <meta name="citation_reference" content="citation_title=An Efficient Depalletizing System Based on 2d Range Imagery; citation_author=Katsoulas, D.; citation_author=Kosmopoulos, D.; citation_publication_date=2001"> <meta name="citation_reference" content="citation_title=The next step in robot commissioning: Autonomous picking and palletizing; citation_author=Krug, R.; citation_author=Stoyanov, T.; citation_author=Tincani, V.; citation_author=Andreasson, H.; citation_author=Mosberger, R.; citation_author=Fantoni, G.; citation_author=Lilienthal, A. J.; citation_publication_date=2016; citation_journal_title=IEEE Robot Auto Lett; citation_volume=1; citation_firstpage=546; citation_lastpage=553; citation_doi=10.1109/LRA.2016.2519944"> <meta name="citation_reference" content="citation_title=Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots; citation_author=Tanaka, J.; citation_author=Ogawa, A.; citation_author=Nakamoto, H.; citation_author=Sonoura, T.; citation_author=Eto, H.; citation_publication_date=2020; citation_journal_title=ROBOMECH Journal; citation_volume=7; citation_firstpage=2; citation_doi=10.1186/s40648-019-0151-0"> <meta name="citation_reference" content="citation_title=Application for Automatic Programming of Palletizing Robots; citation_author=Moura, F. M.; citation_author=Silva, M. 
F.; citation_publication_date=2018"> <meta name="citation_reference" content="citation_title=A flexible robotic depalletizing system for supermarket logistics; citation_author=Caccavale, R.; citation_author=Arpenti, P.; citation_author=Paduano, G.; citation_author=Fontanellli, A.; citation_author=Lippiello, V.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_publication_date=2020; citation_journal_title=IEEE Robot Auto Lett; citation_volume=5; citation_firstpage=4471; citation_lastpage=4476; citation_doi=10.1109/LRA.2020.3000427"> <meta name="citation_reference" content="citation_title=Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets; citation_author=Arpenti, P.; citation_author=Caccavale, R.; citation_author=Paduano, G.; citation_author=Fontanelli, G. A.; citation_author=Lippiello, V.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_publication_date=2020; citation_journal_title=IEEE Robotics and Automation Letters; citation_volume=5; citation_firstpage=6233; citation_lastpage=6238; citation_doi=10.1109/LRA.2020.3013936"> <meta name="citation_reference" content="citation_title=A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics; citation_author=Fontanelli, G. A.; citation_author=Paduano, G.; citation_author=Caccavale, R.; citation_author=Arpenti, P.; citation_author=Lippiello, V.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_publication_date=2020; citation_journal_title=IEEE Robot Autom Lett; citation_volume=5; citation_firstpage=4612; citation_lastpage=4617; citation_doi=10.1109/LRA.2020.3003283"> <meta name="citation_reference" content="citation_title=Active constraints/Virtual fixtures: A survay; citation_author=Bowyer, S. A.; citation_author=Davies, B. L.; citation_author=Baena, F. 
R.; citation_publication_date=2014; citation_journal_title=IEEE Trans Robot; citation_volume=30; citation_firstpage=138; citation_lastpage=157; citation_doi=10.1109/TRO.2013.2283410"> <meta name="citation_reference" content="citation_title=Spatial motion constraints using virtual fixtures generated by anatomy; citation_author=Li, M.; citation_author=Ishii, M.; citation_author=Taylor, R. H.; citation_publication_date=2007; citation_journal_title=IEEE Trans Robot; citation_volume=23; citation_firstpage=4; citation_lastpage=19; citation_doi=10.1109/TRO.2006.886838"> <meta name="citation_reference" content="citation_title=Dynamic active constraints for surgical robots using vector field inequalities; citation_author=Marinho, M. M.; citation_author=Adorno, B. V.; citation_author=k., H.; citation_author=Mitsuishi, M.; citation_publication_date=2019; citation_journal_title=IEEE Trans Robot; citation_volume=35; citation_firstpage=1166; citation_lastpage=1185; citation_doi=10.1109/TRO.2019.2920078"> <meta name="citation_reference" content="citation_title=Control Barrier Functions: Theory and Applications; citation_author=Ames, A. D.; citation_author=Coogan, S.; citation_author=Egerstedt, M.; citation_author=Notomista, G.; citation_author=Sreenath, K.; citation_author=Tabuada, P.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=A surgical palpation probe with 6-axis force/torque sensing capability for minimally invasive surgery; citation_author=Kim, U.; citation_author=Kim, Y. B.; citation_author=Seok, D.-Y.; citation_author=So, J.; citation_author=Choi, H. 
R.; citation_publication_date=2018; citation_journal_title=IEEE Trans Ind Electron; citation_volume=65; citation_firstpage=2755; citation_lastpage=2765; citation_doi=10.1109/TIE.2017.2739681"> <meta name="citation_reference" content="citation_title=A laparoscopic grasping tool with force sensing capability; citation_author=Lee, D.-H.; citation_author=Kim, U.; citation_author=Gulrez, T.; citation_author=Yoon, W. J.; citation_author=Hannaford, B.; citation_author=Choi, H. R.; citation_publication_date=2016; citation_journal_title=IEEE/ASME Trans Mech; citation_volume=21; citation_firstpage=130; citation_lastpage=141"> <meta name="citation_reference" content="citation_title=Adaptive synergies for the design and control of the pisa/iit softhand; citation_author=Catalano, M.; citation_author=Grioli, G.; citation_author=Farnioli, E.; citation_author=Serio, A.; citation_author=Piazza, C.; citation_author=Bicchi, A.; citation_publication_date=2014; citation_journal_title=Int J Robot Res; citation_volume=33; citation_firstpage=768; citation_lastpage=782; citation_doi=10.1177/0278364913518998"> <meta name="citation_reference" content="citation_title=The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working; citation_author=Piazza, C.; citation_author=Catalano, M. G.; citation_author=Godfrey, S. 
B.; citation_author=Rossi, M.; citation_author=Grioli, G.; citation_author=Bianchi, M.; citation_author=Zhao, K.; citation_author=Bicchi, A.; citation_publication_date=2017; citation_journal_title=IEEE Robot Autom Mag; citation_volume=24; citation_firstpage=87; citation_lastpage=101; citation_doi=10.1109/MRA.2017.2751662"> <meta name="citation_reference" content="citation_title=Vision-Based Virtual Fixtures Generation for Robotic-Assisted Polyp Dissection Procedures; citation_author=Moccia, R.; citation_author=Selvaggio, M.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_author=Ficuciello, F.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery; citation_author=Moccia, R.; citation_author=Iacono, C.; citation_author=Siciliano, B.; citation_author=Ficuciello, F.; citation_publication_date=2020; citation_journal_title=IEEE Robot Auto Lett; citation_volume=5; citation_firstpage=1650; citation_lastpage=1655; citation_doi=10.1109/LRA.2020.2969941"> <meta name="citation_reference" content="citation_title=The MUSHA hand II: A multi-functional hand for robot-assisted laparoscopic surgery; citation_author=Liu, H.; citation_author=Selvaggio, M.; citation_author=Ferrentino, P.; citation_author=Moccia, R.; citation_author=Pirozzi, S.; citation_author=Bracale, U.; citation_author=Ficuciello, F.; citation_publication_date=2020; citation_journal_title=IEEE/ASME Trans Mech; citation_volume=26; citation_firstpage=393; citation_lastpage=404"> <meta name="citation_reference" content="citation_title=Bracale Patent Granted n. 
102019000001187, Application Submission Date Jan 2019; citation_author=Saini, S.; citation_author=Ficuciello, F.; citation_author=Liu, H.; citation_publication_date=2019; citation_inbook=Elemento Terminale Per Dispositivi Di Presa Per Interventi Chirurgici, in Particolare Interventi a Minima Invasività"> <meta name="citation_reference" content="citation_title=The musha underactuated hand for robot-aided minimally invasive surgery; citation_author=Selvaggio, M.; citation_author=Fontanelli, G. A.; citation_author=Marrazzo, V. R.; citation_author=Bracale, U.; citation_author=Irace, A.; citation_author=Breglio, G.; citation_author=Villani, L.; citation_author=Siciliano, B.; citation_author=Ficuciello, F.; citation_publication_date=2019; citation_journal_title=Int J Med Robot Comp Assis Surg; citation_volume=15; citation_firstpage=e1981; citation_doi=10.1002/rcs.1981"> <meta name="citation_reference" content="citation_title=Autonomous Endoscope Control Algorithm with Visibility and Joint Limits Avoidance Constraints for Da Vinci Research kit robot; citation_author=Moccia, R.; citation_author=Ficuciello, F.; citation_publication_date=2023"> <meta name="citation_reference" content="citation_title=A Portable Da Vinci Simulator in Virtual Reality; citation_author=Ferro, M.; citation_author=Brunori, D.; citation_author=Magistri, F.; citation_author=Saiella, L.; citation_author=Selvaggio, M.; citation_author=Fontanelli, G. A.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=Portable dVRK: An augmented V-REP simulator of da vinci research kit; citation_author=Fontanelli, G. 
A.; citation_author=Selvaggio, M.; citation_author=Ferro, M.; citation_author=Ficuciello, F.; citation_author=Vendittelli, M.; citation_author=Siciliano, B.; citation_publication_date=2019; citation_journal_title=Acta Polytech Hung; citation_volume=16; citation_firstpage=79; citation_lastpage=98"> <meta name="citation_reference" content="citation_title=Stiffness modeling of the soft-finger contact in robotic grasping; citation_author=Ghafoor, A.; citation_author=Dai, J. S.; citation_author=Duffy, J.; citation_publication_date=2004; citation_journal_title=J Mech Design; citation_volume=126; citation_firstpage=646; citation_lastpage=656; citation_doi=10.1115/1.1758255"> <meta name="citation_reference" content="citation_title=Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery; citation_author=Sallam, M.; citation_author=Fontanelli, G. A.; citation_author=Gallo, A.; citation_author=La Rocca, R.; citation_author=Di Spiezio Sardo, A.; citation_author=Longo, N.; citation_author=Ficuciello, F.; citation_publication_date=2023; citation_journal_title=IEEE Trans Med Robot Bio; citation_volume=5; citation_firstpage=843; citation_lastpage=856; citation_doi=10.1109/TMRB.2023.3309942"> <meta name="citation_reference" content="citation_title=Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate; citation_author=Coevoet, E.; citation_author=Adagolodjo, Y.; citation_author=Lin, M.; citation_author=Duriez, C.; citation_author=Ficuciello, F.; citation_publication_date=2022; citation_journal_title=IEEE Robot Auto Lett; citation_volume=7; citation_firstpage=4853; citation_lastpage=4860; citation_doi=10.1109/LRA.2022.3152322"> <meta name="citation_reference" content="citation_title=Calibration of Tactile/Force Sensors for Grasping with the PRISMA Hand II; citation_author=Canbay, D.; citation_author=Ferrentino, P.; citation_author=Liu, H.; citation_author=Moccia, R.; 
citation_author=Pirozzi, S.; citation_author=Siciliano, B.; citation_author=Ficuciello, F.; citation_publication_date=2021"> <meta name="citation_reference" content="citation_title=Development and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability; citation_author=Leccia, A.; citation_author=Sallam, M.; citation_author=Grazioso, S.; citation_author=Caporaso, T.; citation_author=Di Gironimo, G.; citation_author=Ficuciello, F.; citation_publication_date=2023; citation_journal_title=Eng Appl Artif Intel; citation_volume=121; citation_firstpage=105853; citation_doi=10.1016/j.engappai.2023.105853"> <meta name="citation_reference" content="citation_title=Legged robots for object manipulation: A review; citation_author=Gong, Y.; citation_author=Sun, G.; citation_author=Nair, A.; citation_author=Bidwai, A.; citation_author=R., C. S.; citation_author=Grezmak, J.; citation_author=Sartoretti, G.; citation_author=Daltorio, K. 
A.; citation_publication_date=2023; citation_journal_title=Front Mech Eng; citation_volume=9; citation_doi=10.3389/fmech.2023.1142421"> <meta name="citation_reference" content="citation_title=Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators; citation_author=Jia, G.; citation_author=Huang, H.; citation_author=Li, B.; citation_author=Wu, Y.; citation_author=Cao, Q.; citation_author=Guo, H.; citation_publication_date=2018; citation_journal_title=Mech Mach Theory; citation_volume=128; citation_firstpage=544; citation_lastpage=559; citation_doi=10.1016/j.mechmachtheory.2018.06.017"> <meta name="citation_reference" content="citation_title=Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law; citation_author=Jia, G.; citation_author=Huang, H.; citation_author=Wang, S.; citation_author=Li, B.; citation_publication_date=2021; citation_journal_title=Mech Mach Theory; citation_volume=161; citation_firstpage=104330; citation_doi=10.1016/j.mechmachtheory.2021.104330"> <meta name="citation_reference" content="citation_title=Alma - Articulated Locomotion and Manipulation for a Torque-Controllable Robot; citation_author=Bellicoso, C. 
D.; citation_author=Krämer, K.; citation_author=Stäuble, M.; citation_author=Sako, D.; citation_author=Jenelten, F.; citation_author=Bjelonic, M.; citation_author=Hutter, M.; citation_publication_date=2019"> <meta name="citation_reference" content="citation_title=Roloma: Robust loco-manipulation for quadruped robots with arms; citation_author=Ferrolho, H.; citation_author=Ivan, V.; citation_author=Merkt, W.; citation_author=Havoutis, I.; citation_author=Vijayakumar, S.; citation_publication_date=2023; citation_journal_title=Auton Robot; citation_volume=47; citation_firstpage=1463; citation_lastpage=1481; citation_doi=10.1007/s10514-023-10146-0"> <meta name="citation_reference" content="citation_title=Visual and Haptic Cues for Human-Robot Handover*; citation_author=Costanzo, M.; citation_author=Natale, C.; citation_author=Selvaggio, M.; citation_publication_date=2023"> <meta name="citation_reference" content="citation_title=Origami-based robotic paper-and-board packaging for food industry; citation_author=Dai, J. S.; citation_author=Caldwell, D. G.; citation_publication_date=2010; citation_journal_title=Trend Food Sci Tech; citation_volume=21; citation_firstpage=153; citation_lastpage=157; citation_doi=10.1016/j.tifs.2009.10.007"> <meta name="citation_abstract" content="In this article, we review the main results achieved by the research activities carried out at PRISMA Lab of the University of Naples Federico II where, for 35 years, an interdisciplinary team of experts developed robots that are ultimately useful to humans. We summarize the key contributions made in the last decade in the six research areas of dynamic manipulation and locomotion, aerial robotics, human-robot interaction, artificial intelligence and cognitive robotics, industrial robotics, and medical robotics. After a brief overview of each research field, the most significant methodologies and results are reported and discussed, highlighting their cross-disciplinary and translational aspects. 
Finally, the potential future research directions identified are discussed."> <meta name="citation_doi" content="10.1017/S026357472400033X"> <link rel="alternate" href="/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" hreflang="en" /> <link rel="icon" href="/core/cambridge-core/public/images/favicon.ico" type="image/x-icon"/> <link rel="shortcut icon" href="/core/cambridge-core/public/images/favicon.ico" type="image/x-icon"/> <link href='//fonts.googleapis.com/css?family=Noto+Sans:400,700,400italic,700italic' rel='stylesheet' type='text/css'> <!--[if (gte IE 10)|!(IE)]><!--> <link rel="stylesheet" href="/core/cambridge-core/public/css/app.css?version=v7.337.1-hotfix"/> <link rel="dns-prefetch" href="https://usage.prod.aop.cambridge.org/v1/events"> <link rel="stylesheet" href="/core/cambridge-core/public/bower_components/font-awesome/css/font-awesome.min.css"/> <script src="/core/system/public/bower_components/jquery/dist/jquery.min.js" onerror="console.error('jQuery failed to load')"></script> <script src="/core/system/public/bower_components/jquery-migrate-3/jquery-migrate.min.js" onerror="console.error('jQuery migrate failed to load')"></script> <!--<![endif]--> <!-- IE8/9 Fixes --> <!-- Only supports version 1 of jquery --> <!-- Has a CSS limitation of 4096 directives per file, so we need to split it up into multiple files --> <!--[if lt IE 10]> <link rel="stylesheet" href="/core/cambridge-core/public/css/ie-8-9.css" /> <script src="/core/cambridge-core/public/js/ie8-head.min.js"></script> <script src="/core/cambridge-core/public/js/jquery-1.11.2.min.js"></script> <![endif]--> <script src="/core/cambridge-core/public/bower_components/modernizr/modernizr.js"></script> <!-- Google Analytics --> <script> (function(a,s,y,n,c,h,i,d,e){s.className+=' '+y;h.start=1*new Date; h.end=i=function(){s.className=s.className.replace(RegExp(' ?'+y),'')}; (a[n]=a[n]||[]).hide=h;setTimeout(function(){i();h.end=null},c);h.timeout=c; 
})(window,document.documentElement,'async-hide','dataLayer',4000, {'GTM-PWNSR3B':true}); (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-86516288-1', 'auto'); ga('require', 'GTM-PWNSR3B'); ga('send', 'pageview'); </script> <!-- End Google Analytics --> <!-- Google Tag Manager --> <script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start': new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0], j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.src= 'https://www.googletagmanager.com/gtm.js?id='+i+dl; f.parentNode.insertBefore(j,f); })(window,document,'script','dataLayer','GTM-NTX72TG');</script> <!-- End Google Tag Manager --> <!-- Site24x7 --> <script type="text/javascript"> var rumMOKey='bd7888531793d453d3bdf2268eea537b'; (function(){ if(window.performance && window.performance.timing && window.performance.navigation) { var site24x7_rum_beacon=document.createElement('script'); site24x7_rum_beacon.async=true; site24x7_rum_beacon.setAttribute('src','//static.site24x7rum.eu/beacon/site24x7rum-min.js?appKey='+rumMOKey); document.getElementsByTagName('head')[0].appendChild(site24x7_rum_beacon); } })(window) </script> <!-- Site24x7 --> <script> var AOP = AOP || {}; AOP.uiLanguage = 'en' || 'en'; AOP.uiLanguageSuggested = ''; AOP.leaveTranslationSuggested = ''; AOP.isMultilanguageEnabled = 'true'; AOP.clientSideMultilingualString = 
'%5B%7B%22_id%22%3A%225a7c10d5ab0af516063aca2c%22%2C%22name%22%3A%22Cambridge%20University%20Press%20content%20sharing%20policy%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Cambridge%20University%20Press%20content%20sharing%20policy.%22%2C%22fr%22%3A%22la%20politique%20de%20Cambridge%20University%20Press%20sur%20le%20partage%20(en%20anglais).%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9f6%22%2C%22name%22%3A%22reply%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Article%20eLetters%20tab%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22reply%22%2C%22fr%22%3A%22r%C3%A9ponse%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca29%22%2C%22name%22%3A%22Share%20content%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Share%20content%22%2C%22fr%22%3A%22Partager%20cet%20article%22%7D%2C%7B%22_id%22%3A%225b714525e54e3d290be053d6%22%2C%22name%22%3A%22To%20manage%20your%20alert%20preferences%20and%20update%20your%20details%20go%20to%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22To%20manage%20your%20alert%20preferences%20and%20update%20your%20details%20go%20to%22%2C%22fr%22%3A%22Pour%20g%C3%A9rer%20vos%20pr%C3%A9ferences%20d'alerte%20ou%20mettre%20%C3%A0%20jour%20vos%20details%20veuillez%20svp%20vous%20rendre%20sur%20la%20page%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9f5%22%2C%22name%22%3A%22replies%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Article%20eLetters%20tab%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22replies%22%2C%22fr%22%3A%22r%C3%A9ponses%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be6863d%22%2C%22name%22%3A%22For%20more%20information%2C%20please%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22For%20more%20information%2C%20please%22%
2C%22fr%22%3A%22Pour%20plus%20d'information%2C%20veuillez-vous%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a2%22%2C%22name%22%3A%22April%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22April%22%2C%22fr%22%3A%22Avril%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a3%22%2C%22name%22%3A%22May%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22May%22%2C%22fr%22%3A%22Mai%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836aa%22%2C%22name%22%3A%22December%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22December%22%2C%22fr%22%3A%22D%C3%A9cembre%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d6%22%2C%22name%22%3A%22Sep%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Sep%22%2C%22fr%22%3A%22Sep.%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d4%22%2C%22name%22%3A%22Jul%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Jul%22%2C%22fr%22%3A%22Juil.%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9db%22%2C%22name%22%3A%22Show%20fewer%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Refine%20listing%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Show%20fewer%22%2C%22fr%22%3A%22Afficher%20moins%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e3%22%2C%22name%22%3A%22Citation%20Tools%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Citation%20Tools%22%2C%22fr%22%3A%22Outils%20bibliographiques%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be6863b%22%2C%22name%22%3A%22Alternatively%20you%20can%20download%20a%20PDF%20containing%20the%20link%20which%20can%20be%20freely%20shared%20online%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%2
2Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Alternatively%20you%20can%20download%20a%20PDF%20containing%20the%20link%20which%20can%20be%20freely%20shared%20online%22%2C%22fr%22%3A%22Vous%20pouvez%20aussi%20t%C3%A9l%C3%A9charger%20le%20PDF%20contenant%20ce%20lien%20et%20le%20partager%20librement%20en%20ligne%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a7%22%2C%22name%22%3A%22September%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22September%22%2C%22fr%22%3A%22Septembre%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac94c%22%2C%22name%22%3A%22Selected%20format%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Selected%20format%22%2C%22fr%22%3A%22Format%20s%C3%A9lectionn%C3%A9%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca2a%22%2C%22name%22%3A%22Copy%20and%20paste%20the%20content%20link%20or%20use%20the%20option%20below%20to%20share%20it%20via%20email%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Copy%20and%20paste%20the%20content%20link%20or%20use%20the%20option%20below%20to%20share%20it%20via%20email%22%2C%22fr%22%3A%22Copiez-collez%20ce%20lien%20ou%20cliquez%20le%20bouton%20ci-dessous%20pour%20envoyer%20un%20email%20contenant%20le%20lien%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e9%22%2C%22name%22%3A%22Delete%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Recommend%20to%20librarian%20modal%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Delete%22%2C%22fr%22%3A%22Supprimer%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9cd%22%2C%22name%22%3A%22decline%20option%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22No%20thanks%22%2C%22fr%22%3A%22Non%20merci%20(view%20site%20in%20English)%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d3%22%2C%22name%2
2%3A%22Jun%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Jun%22%2C%22fr%22%3A%22Juin%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e2%22%2C%22name%22%3A%22View%20Altmetric%20attention%20score%20details%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Article%20entry%22%2C%22category%22%3A%22alt%20text%22%2C%22en%22%3A%22View%20Altmetric%20attention%20score%20details%22%2C%22fr%22%3A%22Voir%20le%20d%C3%A9tail%20du%20score%20d'attention%20Altmetric%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d8%22%2C%22name%22%3A%22Nov%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Nov%22%2C%22fr%22%3A%22Nov.%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9dd%22%2C%22name%22%3A%22Page%2FArticle%20number%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Page%2FArticle%20number%22%2C%22fr%22%3A%22Page%2Fnum%C3%A9ro%20d%E2%80%99article%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d5%22%2C%22name%22%3A%22Aug%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Aug%22%2C%22fr%22%3A%22Ao%C3%BBt%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9c5%22%2C%22name%22%3A%22Collapse%20list%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22Collapse%20list%22%2C%22fr%22%3A%22Replier%20la%20liste%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e0%22%2C%22name%22%3A%22Online%20publication%20date%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Online%20publication%20date%22%2C%22fr%22%3A%22Date%20de%20publication%20en%20ligne%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9f1%22%2C%22name%22%3A%22Show%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22
Article%20eLetters%20tab%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Show%22%2C%22fr%22%3A%22Afficher%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca30%22%2C%22name%22%3A%22The%20content%20link%20has%20been%20copied%20to%20your%20clipboard%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20view%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22The%20content%20link%20has%20been%20copied%20to%20your%20clipboard%22%2C%22fr%22%3A%22Le%20lien%20a%20%C3%A9t%C3%A9%20copi%C3%A9%20dans%20votre%20presse-papier%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e5%22%2C%22name%22%3A%22Hide%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Article%20entry%22%2C%22category%22%3A%22label%20with%20hyperlink%22%2C%22en%22%3A%22Hide%22%2C%22fr%22%3A%22Dissimuler%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be6863f%22%2C%22name%22%3A%22Share%20this%20link%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Share%20this%20link%22%2C%22fr%22%3A%22Partager%20ce%20lien%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9c6%22%2C%22name%22%3A%22Expand%20full%20list%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22Expand%20full%20list%22%2C%22fr%22%3A%22D%C3%A9plier%20la%20liste%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9da%22%2C%22name%22%3A%22Show%20more%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Refine%20listing%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Show%20more%22%2C%22fr%22%3A%22Afficher%20plus%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be68642%22%2C%22name%22%3A%22Within%20PDF%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Within%20PDF%22%2C%22fr%22%3A%22Par%20PDF%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a0%22%2C%22name%22%3A%22February%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%2
2category%22%3A%22text%22%2C%22en%22%3A%22February%22%2C%22fr%22%3A%22F%C3%A9vrier%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac94b%22%2C%22name%22%3A%22Copy%20and%20paste%20a%20formatted%20citation%20or%20use%20one%20of%20the%20options%20to%20export%20in%20your%20chosen%20format%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Copy%20and%20paste%20a%20formatted%20citation%20or%20use%20one%20of%20the%20options%20to%20export%20in%20your%20chosen%20format%22%2C%22fr%22%3A%22Copiez-collez%20la%20citation%20ou%20utilisez%20une%20des%20options%20ci-dessous%20pour%20l'exporter%20dans%20le%20format%20de%20votre%20choix%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac94e%22%2C%22name%22%3A%22Copy%20to%20clipboard%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Copy%20to%20clipboard%22%2C%22fr%22%3A%22Copier%20dans%20le%20presse-papier%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9df%22%2C%22name%22%3A%22Type%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Type%22%2C%22fr%22%3A%22Type%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9cf%22%2C%22name%22%3A%22Feb%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Feb%22%2C%22fr%22%3A%22F%C3%A9v.%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9c8%22%2C%22name%22%3A%22Hide%20all%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22Hide%20All%22%2C%22fr%22%3A%22Dissimuler%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e6%22%2C%22name%22%3A%22Search%20journal%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22contextual%20search%22%2C%22category%22%3A%22searchbox%20placeholder%20text%22%2C%22en%22%3A%22Search%20%3CJnlTitle%3E%22%2C%22fr%22%3A%2
2Chercher%20dans%20%3CJnlTitle%3E%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be6863a%22%2C%22name%22%3A%22Copy%20and%20paste%20the%20link%20or%20use%20the%20option%20below%20to%20share%20it%20via%20email%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Copy%20and%20paste%20the%20link%20or%20use%20the%20option%20below%20to%20share%20it%20via%20email%22%2C%22fr%22%3A%22Pour%20cela%2C%20copiez-collez%20ce%20lien%20ou%20bien%20utilisez%20l'option%20ci-dessous%20pour%20le%20partager%20par%20email%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d1%22%2C%22name%22%3A%22Apr%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Apr%22%2C%22fr%22%3A%22Avr.%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be68640%22%2C%22name%22%3A%22Via%20email%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Via%20email%22%2C%22fr%22%3A%22Par%20email%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d0%22%2C%22name%22%3A%22Mar%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Mar%22%2C%22fr%22%3A%22Mars%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9cb%22%2C%22name%22%3A%22close%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22Close%22%2C%22fr%22%3A%22Fermer%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9ca%22%2C%22name%22%3A%22leaving%20language%20prompt%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22%3Cp%3EThe%20%3Clang%3E%20version%20of%20Cambridge%20Core%20is%20not%20currently%20available%20through%20the%20entire%20site.%20The%20page%20you%20are%20visiting%20is%20not%20available%20in%20%3Clang%3E%20and%20will%20be%20displayed%20in%20English.%3C%2Fp%3E%22%2C%22fr%22%3A%22%3Cp%3ELa%20version%20fran%C3
%A7aise%20de%20Cambridge%20Core%20n'est%20actuellement%20pas%20disponible%20%C3%A0%20travers%20l'int%C3%A9gralit%C3%A9%20du%20site.%20La%20page%20vers%20laquelle%20vous%20vous%20rendez%20n'est%20pas%20disponible%20en%20fran%C3%A7ais%20et%20sera%20affich%C3%A9e%20en%20anglais.%3C%2Fp%3E%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9c7%22%2C%22name%22%3A%22View%20all%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22View%20All%22%2C%22fr%22%3A%22Voir%20tout%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca2b%22%2C%22name%22%3A%22Anyone%20you%20share%20the%20following%20link%20with%20will%20be%20able%20to%20freely%20read%20this%20content.%20For%20more%20information%2C%20please%20view%20the%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Anyone%20you%20share%20the%20following%20link%20with%20will%20be%20able%20to%20freely%20read%20this%20content.%20For%20more%20information%2C%20please%20view%20the%22%2C%22fr%22%3A%22Tous%20ceux%20avec%20qui%20vous%20partagez%20ce%20lien%20obtiendront%20un%20acc%C3%A8s%20gratuit%20%C3%A0%20cet%20article.%20Pour%20plus%20d'information%2C%20veuillez%20svp%20consulter%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e8%22%2C%22name%22%3A%22Relevance%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Relevance%22%2C%22fr%22%3A%22Pertinence%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a6%22%2C%22name%22%3A%22August%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22August%22%2C%22fr%22%3A%22Ao%C3%BBt%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d2%22%2C%22name%22%3A%22May%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22May%22%2C%22fr%22%3A%22Mai%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9dc%22%2
C%22name%22%3A%22Sorted%20by%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Sorted%20by%22%2C%22fr%22%3A%22Tri%20par%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e1%22%2C%22name%22%3A%22Publication%20date%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Publication%20date%22%2C%22fr%22%3A%22Date%20de%20publication%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9c9%22%2C%22name%22%3A%22language%20prompt%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22%3Cp%3EWe%20notice%20that%20%3Clang%3E%20is%20the%20default%20language%20in%20your%20browser%20settings.%20Would%20you%20like%20to%20view%20this%20site%20in%20%3Clang%3E%3F%3C%2Fp%3E%22%2C%22fr%22%3A%22%3Cp%3EVotre%20navigateur%20indique%20le%20fran%C3%A7ais%20comme%20langue%20par%20defaut.%20Souhaitez%20vous%20consulter%20le%20site%20en%20fran%C3%A7ais%3F%3C%2Fp%3E%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9ea%22%2C%22name%22%3A%22Access%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Refine%20listing%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Access%22%2C%22fr%22%3A%22Acc%C3%A8s%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9f8%22%2C%22name%22%3A%22Hide%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Article%20eLetters%20tab%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Hide%22%2C%22fr%22%3A%22Dissimuler%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be6863c%22%2C%22name%22%3A%22For%20more%20information%2C%20please%20view%20our%20content%20sharing%20policy%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22For%20more%20information%2C%20please%20view%20our%20content%20sharing%20policy%22%2C%22fr%22%3A%22Pour%20plus%20d'information%2C%20veuillez-vous%20r%C3%A9f%C3%A9rer%20%C3%A0%20not
re%20politique%20de%20partage%20de%20contenus%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be6863e%22%2C%22name%22%3A%22view%20our%20content%20sharing%20policy%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22view%20our%20content%20sharing%20policy%22%2C%22fr%22%3A%22r%C3%A9f%C3%A9rer%20%C3%A0%20notre%20politique%20de%20partage%20de%20contenus%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9ce%22%2C%22name%22%3A%22Jan%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Jan%22%2C%22fr%22%3A%22Jan%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca2e%22%2C%22name%22%3A%22Share%20via%20email%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Share%20via%20email%22%2C%22fr%22%3A%22Partager%20par%20e-mail%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca2d%22%2C%22name%22%3A%22OR%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22OR%22%2C%22fr%22%3A%22OU%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac94d%22%2C%22name%22%3A%22Change%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Change%22%2C%22fr%22%3A%22Modifier%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac94f%22%2C%22name%22%3A%22Download%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Download%22%2C%22fr%22%3A%22T%C3%A9l%C3%A9charger%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063aca2f%22%2C%22name%22%3A%22Cancel%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Cancel%22%2C%22fr%22%3A%22Annuler%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e4%22%2C%22name%22%3A%22View%2
2%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Article%20entry%22%2C%22category%22%3A%22label%20with%20hyperlink%22%2C%22en%22%3A%22View%22%2C%22fr%22%3A%22Voir%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be68639%22%2C%22name%22%3A%22Anyone%20you%20share%20the%20following%20link%20with%20will%20be%20able%20to%20freely%20read%20this%20content%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Anyone%20you%20share%20the%20following%20link%20with%20will%20be%20able%20to%20freely%20read%20this%20content%22%2C%22fr%22%3A%22%EF%BB%BFToute%20personne%20avec%20qui%20vous%20partagez%20le%20lien%20suivant%20pourra%20acc%C3%A9der%20gratuitement%20%C3%A0%20cet%20article%22%7D%2C%7B%22_id%22%3A%225b754bedf620e6600be68641%22%2C%22name%22%3A%22Within%20PDF%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Share%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Within%20PDF%22%2C%22fr%22%3A%22Par%20PDF%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a8%22%2C%22name%22%3A%22October%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22October%22%2C%22fr%22%3A%22Octobre%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9cc%22%2C%22name%22%3A%22accept%20option%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22Yes%2C%20please%20switch%20to%20%3Clang%3E%22%2C%22fr%22%3A%22Oui%2C%20voir%20le%20site%20en%20fran%C3%A7ais%22%7D%2C%7B%22_id%22%3A%225b714525e54e3d290be053d5%22%2C%22name%22%3A%22If%20you%20have%20any%20queries%20about%20this%20please%20contact%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22If%20you%20have%20any%20queries%20about%20this%20please%20contact%22%2C%22fr%22%3A%22Pour%20toutes%20questions%20veuillez%20svp%20contacter%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab50718369f%22%2C%22name%22%3A%22January%22%2C%22a
rea%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22January%22%2C%22fr%22%3A%22Janvier%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9de%22%2C%22name%22%3A%22Title%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Sorting%20options%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Title%22%2C%22fr%22%3A%22Titre%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d9%22%2C%22name%22%3A%22Dec%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Dec%22%2C%22fr%22%3A%22D%C3%A9c.%22%7D%2C%7B%22_id%22%3A%225b714525e54e3d290be053d4%22%2C%22name%22%3A%22This%20alert%20has%20been%20successfully%20added.%20Our%20records%20show%20that%20there%20is%20an%20error%20with%20the%20email%20address%20you%20have%20provided%20and%20therefore%20we%20are%20currently%20unable%20to%20send%20content%20alerts%20to%20you.%20The%20email%20address%20we%20are%20attempting%20to%20send%20your%20alerts%20to%20is%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22%22%2C%22category%22%3A%22%22%2C%22en%22%3A%22This%20alert%20has%20been%20successfully%20added.%20Our%20records%20show%20that%20there%20is%20an%20error%20with%20the%20email%20address%20you%20have%20provided%20and%20therefore%20we%20are%20currently%20unable%20to%20send%20content%20alerts%20to%20you.%20The%20email%20address%20we%20are%20attempting%20to%20send%20your%20alerts%20to%20is%22%2C%22fr%22%3A%22Alerte%20ajout%C3%A9e%20avec%20succ%C3%A8s.%20D%C3%BB%20a%20un%20probl%C3%A8me%20avec%20votre%20adresse%20e-mail%20nous%20ne%20sommes%20actuellement%20pas%20en%20mesure%20de%20vous%20envoyer%20des%20alertes%20par%20e-mail.%20L'adresse%20e-mail%20%C3%A0%20laquelle%20nous%20n'arrivons%20pas%20%C3%A0%20envoyer%20les%20alertes%20est%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a9%22%2C%22name%22%3A%22November%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22e
n%22%3A%22November%22%2C%22fr%22%3A%22Novembre%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac950%22%2C%22name%22%3A%22Export%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22button%20label%22%2C%22en%22%3A%22Export%22%2C%22fr%22%3A%22Exporter%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9d7%22%2C%22name%22%3A%22Oct%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Metrics%20tab%22%2C%22category%22%3A%22label%22%2C%22en%22%3A%22Oct%22%2C%22fr%22%3A%22Oct.%22%7D%2C%7B%22_id%22%3A%225a7c10d5ab0af516063ac9e7%22%2C%22name%22%3A%22Search%20society%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22contextual%20search%22%2C%22category%22%3A%22searchbox%20placeholder%20text%22%2C%22en%22%3A%22Search%20%3CSocietyName%3E%22%2C%22fr%22%3A%22Rechercher%20dans%20%3CSocietyName%3E%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a1%22%2C%22name%22%3A%22March%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22March%22%2C%22fr%22%3A%22Mars%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a4%22%2C%22name%22%3A%22June%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22June%22%2C%22fr%22%3A%22Juin%22%7D%2C%7B%22_id%22%3A%225bb361659f569ab5071836a5%22%2C%22name%22%3A%22July%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22dates%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22July%22%2C%22fr%22%3A%22Juillet%22%7D%2C%7B%22_id%22%3A%226568350e9421515f267b9929%22%2C%22name%22%3A%22Copy%20and%20paste%20a%20formatted%20citation%20or%20download%20in%20your%20chosen%20format%22%2C%22area%22%3A%22Client%20side%22%2C%22element%22%3A%22Export%20citation%20modal%22%2C%22category%22%3A%22text%22%2C%22en%22%3A%22Copy%20and%20paste%20a%20formatted%20citation%20or%20download%20in%20your%20chosen%20format%22%2C%22fr%22%3A%22Copiez%20et%20collez%20la%20citation%20format%C3%A9e%20ou%20t%C3%A9
l%C3%A9chargez-la%20dans%20le%20format%20de%20votre%20choix%22%7D%5D'; </script>
<script>
  // Disable pointer and keyboard interaction on an image element whose
  // source failed to load (bound via the markup's onerror hook).
  function handleImageError(element) {
    const $element = $(element);
    $element.css('pointer-events', 'none');
    $element.attr('tabindex', '-1');
  }
</script>
<meta name="shareaholic:site_id" content="b60ec523a5bee2ad04c630bf0d3aa388">
<link rel="preload" href="https://cdn.shareaholic.net/assets/pub/shareaholic.js" as="script">
<script data-cfasync="false" async src="https://cdn.shareaholic.net/assets/pub/shareaholic.js"></script>
<script>
  // After DOM ready, load the a11y helper bundle, then patch the third-party
  // Shareaholic share modal so it gets a proper keyboard focus trap.
  $(document).ready(function () {
    AOP.loadScript(AOP.baseUrl + '/cambridge-core/public/js/a11y-1.0.40.min.js', function () {
      if (!window.cambridgeA11y) {
        console.error('cambridgeA11y not loaded')
        return
      }
      const { ShareaholicAccessibilityHandler, FocusTrapKeydownHandler } = window.cambridgeA11y
      new ShareaholicAccessibilityHandler(new FocusTrapKeydownHandler()).fixAccessibilityForShareaholicModal();
    })
  })
</script>
<script>
  var AOP = AOP || {};
  // Trap keyboard focus inside the modal identified by modalId.
  // firstFocusableElement (optional) receives initial focus instead of the
  // modal's first focusable descendant. No-op when the modal is absent or
  // contains no visible focusable elements.
  AOP.focusTrap = function (modalId, firstFocusableElement) {
    var modalElement = document.getElementById(modalId)
    // Guard BEFORE touching the element: getElementById returns null for an
    // unknown id, and the original code dereferenced it first and threw.
    if (!modalElement) { return; }
    modalElement.removeAttribute('tabindex')
    // Get all focusable elements inside the modal
    var focusableElements = Array.from(modalElement.querySelectorAll(
      'a[href], area[href], input:not([disabled]):not([type="hidden"]), select:not([disabled]), textarea:not([disabled]), button:not([disabled]), [tabindex="0"]'
    ))
    focusableElements = focusableElements.filter(el => {
      const ancestorElement = el.offsetParent || el.parentElement
      const visibleElement = el.style.visibility !== 'hidden'
      return ancestorElement && visibleElement
    })
    // Array.from always yields an array (truthy even when empty), so the
    // original `if (!focusableElements)` check never fired and an empty modal
    // later crashed on lastFocusable.focus(). Test the length instead.
    if (!focusableElements.length) { return; }
    var firstFocusable = firstFocusableElement || focusableElements[0]
    var lastFocusable = focusableElements[focusableElements.length - 1]
    var TAB_KEY_CODE = 'Tab'
    firstFocusable.focus()
    $(firstFocusable).on('keydown', function (event) { var isTabPressed = event.key ===
TAB_KEY_CODE if (!isTabPressed) { return } if (event.shiftKey) { event.preventDefault() lastFocusable.focus() } }) $(`#${modalId}`).on('keydown', function (event) { var isTabPressed = event.key === TAB_KEY_CODE if (!isTabPressed) { return } if (event.shiftKey) { if (document.activeElement === firstFocusable || document.activeElement === modalElement) { event.preventDefault() lastFocusable.focus() } } else { if (document.activeElement === lastFocusable || document.activeElement === modalElement) { event.preventDefault() firstFocusable.focus() } } }) } </script></head> <body> <header> <!-- Google Tag Manager (noscript) --> <noscript><iframe src="https://www.googletagmanager.com/ns.html?id=GTM-NTX72TG" height="0" width="0" style="display:none;visibility:hidden" sandbox="allow-scripts"></iframe></noscript> <!-- End Google Tag Manager (noscript) --> <a class="skiptocontent" href="#maincontent"> Skip to main content </a> <a class="skiptocontent" href="/core/accessibility"> Accessibility help </a> <div class="cookie-message"> <div class="row"> <div class="small-11 columns"> <p>We use cookies to distinguish you from other users and to provide you with a better experience on our websites. 
Close this message to accept cookies or find out how to <a href="https://www.cambridge.org/about-us/legal-notices/cookies-policy/" target="_blank">manage your cookie settings</a>.</p> </div> <div class="small-1 columns"> <a href="#" onclick="closeMessage();" class="cookie-close"> <!--[if IE 8 ]> <img alt = "Close cookie message" title = "Close cookie message" src = "/core/cambridge-core/public/images/icn_circle__btn_close_white.png"> <![endif]--> <!--[if (gte IE 9)|!(IE)]><!--> <img alt = "Close cookie message" title = "Close cookie message" src = "/core/cambridge-core/public/images/icn_circle__btn_close_white.svg"> <!--<![endif]--> </a> </div> </div> </div> <script> var cookieMessage = $('.cookie-message'); var cookieName = 'EULAW'; var closeMessage = function () { $.cookie(cookieName, true, {expires: 365, path:'/'}); cookieMessage.slideUp().promise().done(function () { cookieMessage.remove(); }); }; $(document).ready(function () { if ($.cookie(cookieName) !== 'true') { cookieMessage.slideDown(); } }); </script> <div id="login-modal" class="reveal-modal small" data-reveal role="dialog" aria-labelledby="loginModalHeader"> <div class="header"> <h2 class="heading_07 margin-bottom" id="loginModalHeader">Login Alert</h2> </div> <div class="wrapper large-margin-top-for-small-only"> <div class="row margin-top"> <div class="large-12 columns"> <div class="panel callout message"></div> </div> </div> <div class="row margin-top"> <div class="small-6 large-6 columns"><a href="#" onclick="$('#login-modal').foundation('reveal', 'close');return false;" class="right small button radius transparent cancel">Cancel</a></div> <div class="small-6 large-6 columns"> <a href="/core/login?ref=/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" class="left small radius button blue confirm login">Log in</a> </div> </div> <a href="#" class="close-reveal-modal" aria-label="Close login notification"><span aria-hidden="true">×</span></a> </div> </div> <div 
id="multilingual-suggestion-modal" class="reveal-modal small" data-reveal> <div class="wrapper large-margin-top-for-small-only"> <div class="row margin-top"> <div class="large-12 columns"> <div id="multilingual-suggestion-modal-message" class="panel callout message"></div> </div> </div> <div class="row margin-top"> <div class="small-6 large-6 columns"><a id="multilingual-suggestion-modal-decline-option" href="#" class="right small button radius transparent cancel"></a></div> <div class="small-6 large-6 columns"> <a id="multilingual-suggestion-modal-accept-option" href="#" class="left small radius button blue confirm login"></a> </div> </div> <a class="close-reveal-modal">×</a> </div> </div> <div class="global-header-wrapper"> <div class="global-header-spacer"></div> <div id="global-header" class="global-header"> <div class="__shared-elements-html ShEl"><div class="__shared-elements-head"> <link rel="stylesheet" href="/aca/shared-elements/_nuxt/entry.BhGMTrWu.css"> <link rel="prefetch" as="style" href="/aca/shared-elements/_nuxt/error-404.B06nACMW.css"> <link rel="prefetch" as="style" href="/aca/shared-elements/_nuxt/error-500.WGRfNq7F.css"> </div><div class="__shared-elements-body"><div id="__sharedElements-q4vo9n"><!--[--><!----><!----><div class="shared-elements"><div class="gh-headerContainer"><div class="gh-globalHeader"><div class="gh-logoContainer"><a href="https://www.cambridge.org/academic" class="gh-cup-logo"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="149" height="30" viewbox="0 0 149 30" fill="none" aria-label="Homepage Cambridge University Press" class="gh-logo" data-v-7c4b1471><g clip-path="url(#A)" data-v-7c4b1471><g class="B" data-v-7c4b1471><path d="M147.065 1.786l.036 2.785-.445.114c-.1-1.217-.376-2.211-2.324-2.211h-2.618v5.147h2.247c1.103 0 1.402-.616 1.527-1.679h.447v4.304l-.446.007c-.069-1.029-.311-1.829-1.518-1.829h-2.257v4.52c0 1.314 1.133 1.314 2.332 1.314 2.151 0 3.149-.135 
3.75-2.31l.441.098-.602 2.901h-9.774v-.482h.186c.75 0 1.443-.14 1.443-1.162V3.43c0-1.022-.693-1.163-1.443-1.163h-.186v-.481h9.204zm-53.267 0h-5.895v.481h.26c.486 0 1.388 0 1.388 1.525v9.569c0 .815-.748 1.104-1.388 1.104h-.26v.482h5.724v-.482h-.261c-.932 0-1.591-.115-1.591-1.6V8.766h1.454l3.918 6.18h3.371v-.492c-.889-.048-1.311-.356-1.828-1.153l-3.23-4.915.085-.035c1.152-.466 2.615-1.414 2.615-3.286 0-2.207-1.447-3.28-4.363-3.28zm-.749.536c1.685 0 2.804 1.123 2.804 2.858 0 2.138-.903 3.051-2.953 3.051h-1.125l.001-5.826a8.87 8.87 0 0 1 1.273-.083zm13.772-.539h-5.501v.481h.186c.751 0 1.443.142 1.443 1.163V13.3c0 1.021-.692 1.162-1.443 1.162h-.186v.481h5.501v-.481h-.186c-.75 0-1.444-.14-1.444-1.162V3.427c0-1.021.694-1.163 1.444-1.163h.186v-.481zM31.488 8.355c0 4.435 2.709 6.939 6.956 6.939 2.089 0 3.924-.818 5.112-2.354l-.345-.644c-1.055 1.355-2.452 2.089-4.412 2.089-3.487 0-4.927-3.074-4.927-6.324 0-3.652 1.781-5.919 4.647-5.919 2.79 0 3.616 1.617 4.112 3.383l.433-.056-.139-3.046c-1.373-.641-2.776-.945-4.46-.945-1.88 0-3.545.638-4.814 1.775-1.367 1.224-2.162 3.146-2.162 5.102zM59.883 4.33l4.761 10.706h.397l4.553-10.921.401 8.416.019.944c0 .893-.643.931-1.533.984l-.036.002v.475h5.464v-.475c-1.008-.038-1.544-.13-1.622-1.151l-.509-8.729c-.112-1.86.151-2.32 1.632-2.32h.07l.001-.485h-3.654l-4.231 9.975-4.472-9.974h-3.727v.479h.152c1.046 0 1.669.644 1.602 1.779l-.516 8.32c-.069 1.215-.161 2.046-1.657 2.106h-.169v.478h4.439v-.478h-.113c-1.21-.045-1.768-.203-1.689-2.003l.437-8.128zm55.196 10.61c1.826 0 3.63-.436 4.919-1.712 1.28-1.27 2.015-3.056 2.015-4.9 0-1.974-.668-3.636-1.93-4.81-1.28-1.191-3.113-1.741-5.301-1.741l-5.887.005v.482h.187c.749 0 1.443.141 1.443 1.162v9.871c0 1.023-.694 1.163-1.443 1.163h-.187v.483l6.184-.004zM114.1 2.349c3.704 0 5.505 1.986 5.505 6.347 0 2.806-1.514 5.669-4.841 5.669-1.413 0-2.003-.225-2.003-1.621l.001-10.334a17.99 17.99 0 0 1 1.338-.062zm19.119 11.918c-.493.224-1.326.347-2.132.33-3.496 0-5.062-3.303-5.062-6.636 0-3.628 1.855-5.882 
4.914-5.882 2.267 0 3.277.887 3.823 3.15l.441-.028-.209-3.141c-1.018-.34-2.623-.651-4.184-.651-3.487 0-7.192 2.112-7.192 6.746 0 4.357 2.81 7.173 7.156 7.173 1.801 0 3.422-.307 4.63-.695l.002-3.592c0-.706.374-1.079 1.082-1.079h.153v-.47h-5.062v.47h.154c.973 0 1.487.374 1.487 1.079l-.001 3.226zM80.916 8.129c2.063 0 3.294 1.147 3.294 3.168 0 1.797-1.21 3.117-2.814 3.117-1.336 0-1.746-.368-1.746-1.654V8.129h1.265zm-.073-5.86c1.733 0 2.612.865 2.612 2.723 0 1.682-.999 2.562-2.813 2.562h-.991V2.269h1.192zm1.507 5.51c1.616-.261 3.451-1.032 3.451-3.117 0-1.883-1.511-2.878-4.256-2.878l-5.846.003v.481h.261c.484 0 1.387 0 1.387 1.524v9.548c0 .817-.748 1.121-1.387 1.121h-.261v.477l6.451.001c2.463 0 4.358-1.655 4.358-3.72 0-.866-.303-1.609-.877-2.195-.742-.759-1.624-1.124-3.281-1.245zM50.387 1.784l4.289 11.418c.318.903.783 1.258 1.463 1.258h.04l-.001.486H51.08v-.486h.116c.991 0 1.38-.415 1.082-1.327L51.24 10.24h-4.149l-.956 2.654c-.42 1.245.237 1.553 1.201 1.553h.174l.001.494h-4.008l.002-.494h.045c.811 0 1.358-.498 1.668-1.305 0-.001 4.18-11.36 4.18-11.36l.992.002zm-3.065 7.81h3.684l-1.84-5.127-1.845 5.127zM32.257 20.335c.66 0 .829.18.829.672v4.573c0 1.56 1.153 2.449 3.111 2.449 1.982 0 3.063-1.02 3.063-2.677v-4.141c0-.744.18-.876.793-.876v-.348l-1.093.024a30.18 30.18 0 0 1-1.237-.024v.348c.625.012.925.012.925 1.116v3.997c0 1.272-.757 2.077-2.102 2.077-1.333 0-2.054-.84-2.054-2.089v-4.43c0-.612.264-.672.781-.672h.084v-.348l-1.573.024c-.409 0-.841-.012-1.526-.024v.348zm9.028 0c.829.024 1.237.312 1.237.732v5.33c0 .78-.252 1.056-1.177 1.068v.348l1.429-.024a43.1 43.1 0 0 1 1.453.024v-.348h-.048c-.877 0-1.165-.336-1.165-1.128v-4.982l5.909 6.686h.312v-6.578c0-.6.216-1.128.853-1.128h.264v-.348l-1.429.024c-.288 0-.757-.012-1.393-.024v.348c.853 0 1.213.192 1.213 1.14v4.189l-5.045-5.678-1.081.024c-.432 0-.877-.012-1.333-.024v.348h0zm13.441 7.13c-.685-.012-.853-.12-.853-.66v-5.678c0-.672.168-.792.877-.792v-.348l-1.501.024c-.564 0-1.177-.012-1.633-.024v.348c.673 0 .865.18.865.696v5.462c0 
.78-.168.972-.901.972v.348c.36-.012.709-.024 1.622-.024a56.45 56.45 0 0 1 1.525.024v-.348h0zm1.293-7.13c.601.012.829.072.973.432l2.931 7.238h.372l2.739-6.878c.132-.336.216-.492.288-.588.096-.12.18-.204.444-.204h.072v-.348l-1.057.024c-.325 0-.565-.012-1.057-.024v.348c.517.012.853.012.853.3 0 .144-.06.336-.156.6L60.631 26l-1.886-4.681c-.144-.348-.18-.528-.18-.66 0-.312.324-.312.829-.324v-.348l-1.742.024c-.541 0-.973-.012-1.633-.024v.348h0zm8.959 0c.877 0 1.009.084 1.009.744v5.642c0 .54-.132.732-1.009.744v.348l2.774-.024 3.375.012.42-1.789h-.312c-.204.552-.336.78-.468.924a1.14 1.14 0 0 1-.589.312c-.252.06-.709.096-1.381.096-.541 0-.865-.048-1.069-.156-.216-.108-.325-.312-.325-.732V23.96h1.55c.492 0 .829.156.829.816v.192l.3-.06c-.012-.324-.036-.648-.036-1.428l.012-.996h-.3c0 .708-.192.948-.877.948h-1.478v-2.977l1.502-.036c.829 0 1.021.084 1.201.24.168.156.288.348.372 1.02l.312-.12c-.036-.456-.048-.9-.048-1.116 0-.156.012-.348.012-.456l-3.231.024c-.853 0-1.694-.012-2.546-.024v.348h0zm7.686 0h.06c.745 0 .937.108.937.792v5.522c0 .612-.144.816-.949.816h-.06v.348a76.17 76.17 0 0 1 1.778-.024l1.682.024v-.348h-.084c-.757 0-.961-.192-.961-.624V24.14h.757l.588 1.068a22.31 22.31 0 0 0 .697 1.092l.672 1.032c.264.408.409.48.613.48.252 0 .793-.024 1.189-.024.276 0 .516.012.817.024v-.348a1.26 1.26 0 0 1-.913-.336c-.228-.204-.516-.552-.841-1.044L77.18 23.9c1.057-.396 1.621-1.044 1.621-1.957 0-1.26-.961-1.957-2.895-1.957l-1.982.024c-.288 0-.757-.012-1.261-.024v.348h0zm2.402.084c.192-.06.421-.084.613-.084 1.057 0 1.681.6 1.681 1.753 0 .828-.36 1.224-.673 1.416-.24.144-.516.228-.961.228h-.661v-3.313zm9.104-.624c-1.501 0-2.534.888-2.534 2.317 0 1.188.757 1.813 1.814 2.341.865.432 1.609.804 1.609 1.693 0 .66-.492 1.404-1.526 1.404-1.057 0-1.501-.684-1.826-1.765l-.3.048c.036.156.096.648.168 1.62.697.324 1.297.504 1.994.504 1.417 0 2.654-.876 2.654-2.317 0-.936-.48-1.704-1.802-2.389-1.153-.6-1.694-.936-1.694-1.68 0-.6.444-1.369 1.465-1.369.757 0 1.177.42 1.369 
1.332l.312-.036c-.06-.504-.12-1.092-.12-1.549l-.757-.084c-.432-.048-.697-.072-.829-.072zm6.391 7.671c-.685-.012-.853-.12-.853-.66v-5.678c0-.672.168-.792.877-.792v-.348l-1.502.024c-.564 0-1.177-.012-1.633-.024v.348c.673 0 .865.18.865.696v5.462c0 .78-.168.972-.901.972v.348c.36-.012.709-.024 1.621-.024a56.9 56.9 0 0 1 1.525.024v-.348zm1.517-5.966c.144-.792.409-1.044 1.333-1.044h1.633v6.002c0 .936-.192 1.008-.901 1.008h-.156v.348l1.79-.024 1.766.024v-.348c-.865-.012-1.105-.156-1.105-.864v-6.146h1.67c.432 0 .721.072.865.216.132.132.204.264.24.9l.276-.108c0-.408 0-.852.144-1.789l-.204-.096c-.18.3-.288.396-.673.396h-5.933c-.348 0-.529-.132-.613-.396h-.252a10.45 10.45 0 0 1-.216 1.921h.336zm8.746-1.164c.433 0 .745.048.913.276l2.558 3.469v2.653c0 .672-.216.72-1.045.732v.348l1.826-.024 1.694.024v-.348c-.853 0-1.057-.144-1.057-.624v-3.085l2.126-2.977c.252-.348.504-.444.828-.444v-.348l-1.045.024c-.252 0-.672-.012-1.213-.024v.348c.493.012.793.024.793.24 0 .12-.144.348-.24.492l-1.477 2.221-1.73-2.329c-.156-.216-.18-.324-.18-.384 0-.204.288-.228.624-.24v-.348l-1.693.024c-.565 0-.889-.012-1.682-.024v.348zm14.689.036c.12-.012.36-.036.636-.036 1.405 0 1.453 1.729 1.453 1.981 0 1.116-.492 1.717-1.465 1.717h-.06v.312h.18c1.393 0 1.874-.408 2.198-.744.264-.276.565-.768.565-1.488 0-1.236-.805-2.125-2.607-2.125l-1.717.024c-.385 0-.985-.012-1.442-.024v.348c.565 0 .853.036.853.708v5.63c0 .672-.24.792-.865.792v.348l1.586-.024 1.838.024v-.348c-.649 0-1.154 0-1.154-.756l.001-6.338zm4.466-.036h.06c.744 0 .937.108.937.792v5.522c0 .612-.145.816-.949.816h-.06v.348c.42-.012 1.225-.024 1.777-.024l1.682.024v-.348h-.084c-.757 0-.961-.192-.961-.624V24.14h.757l.588 1.068c.216.372.529.84.697 1.092l.672 1.032c.265.408.409.48.613.48.252 0 .793-.024 1.189-.024.276 0 .517.012.817.024v-.348a1.26 1.26 0 0 1-.913-.336c-.228-.204-.516-.552-.841-1.044l-1.465-2.185c1.057-.396 1.621-1.044 1.621-1.957 0-1.26-.96-1.957-2.894-1.957l-1.982.024c-.288 0-.757-.012-1.261-.024v.348h0zm2.402.084a2.15 2.15 0 0 1 
.613-.084c1.057 0 1.681.6 1.681 1.753 0 .828-.36 1.224-.673 1.416-.24.144-.516.228-.96.228h-.661V20.42zm6.516-.084c.877 0 1.009.084 1.009.744v5.642c0 .54-.132.732-1.009.744v.348l2.775-.024 3.375.012.42-1.789h-.312c-.204.552-.336.78-.469.924-.144.144-.3.24-.588.312-.252.06-.709.096-1.381.096-.541 0-.865-.048-1.069-.156-.217-.108-.325-.312-.325-.732V23.96h1.55c.492 0 .828.156.828.816v.192l.301-.06c-.012-.324-.036-.648-.036-1.428l.012-.996h-.301c0 .708-.192.948-.876.948h-1.478v-2.977l1.502-.036c.828 0 1.021.084 1.201.24.168.156.288.348.372 1.02l.312-.12c-.036-.456-.048-.9-.048-1.116 0-.156.012-.348.012-.456l-3.231.024c-.852 0-1.693-.012-2.546-.024v.348zm10.51-.54c-1.501 0-2.534.888-2.534 2.317 0 1.188.757 1.813 1.813 2.341.865.432 1.61.804 1.61 1.693 0 .66-.492 1.404-1.526 1.404-1.056 0-1.501-.684-1.825-1.765l-.3.048c.036.156.096.648.168 1.62.696.324 1.297.504 1.994.504 1.417 0 2.654-.876 2.654-2.317 0-.936-.481-1.704-1.802-2.389-1.153-.6-1.693-.936-1.693-1.68 0-.6.444-1.369 1.465-1.369.757 0 1.177.42 1.369 1.332l.313-.036c-.06-.504-.12-1.092-.12-1.549l-.757-.084c-.432-.048-.697-.072-.829-.072zm6.233 0c-1.501 0-2.534.888-2.534 2.317 0 1.188.757 1.813 1.813 2.341.865.432 1.61.804 1.61 1.693 0 .66-.493 1.404-1.526 1.404-1.056 0-1.501-.684-1.825-1.765l-.3.048c.036.156.096.648.168 1.62.696.324 1.297.504 1.994.504 1.417 0 2.654-.876 2.654-2.317 0-.936-.48-1.704-1.802-2.389-1.153-.6-1.693-.936-1.693-1.68 0-.6.444-1.369 1.465-1.369.757 0 1.177.42 1.369 1.332l.313-.036c-.06-.504-.12-1.092-.12-1.549l-.757-.084c-.432-.048-.697-.072-.829-.072zM0 0v15.599c0 7.67 4.03 9.859 5.967 10.911.356.193 6.333 3.162 6.715 3.352l.258.138.257-.137 6.716-3.352C21.85 25.459 25.88 23.27 25.88 15.6V0H0z" data-v-7c4b1471></path></g><path d="M12.939 29.365l2.633-1.309V15.762h9.746l.001-.163V10.5h-9.747V.559h-5.266V10.5H.559v5.099l.001.163h9.746v12.294l2.633 1.309z" fill="#fff" data-v-7c4b1471></path><g fill="#e73337" data-v-7c4b1471><path d="M.559.559h9.747V10.5H.559V.559zm15.015 
0h9.747V10.5h-9.747V.559zm-5.269 15.205H.559c.056 7.126 3.712 9.191 5.674 10.256.199.108 2.281 1.146 4.073 2.038V15.764zm5.269 0v12.294l4.073-2.038c1.961-1.065 5.618-3.13 5.673-10.256h-9.746z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M19.073 13.234l.201-.239-.254-.302-.255.302.202.239c.003.12.01 1.175-.611 1.552 0 0 .207.04.486-.014a3.1 3.1 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.59-1.791l.201-.239-.254-.302-.255.302.202.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.486-.014a3.09 3.09 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.053.486.014.486.014-.622-.377-.614-1.431-.611-1.551zm1.586 1.791l.201-.239-.254-.302-.254.302.201.239c.003.12.01 1.175-.612 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.586-1.791l.201-.239-.254-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.485-.014a3.11 3.11 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.053.485.014.485.014-.622-.377-.614-1.431-.611-1.551zm-6.348 0l.201-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.486-.014a3.11 3.11 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.551zM3.733 13.234l.201-.239-.254-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.552zm1.59-1.791l.201-.239-.254-.302-.254.302.201.239c.003.121.011 1.175-.612 1.551 0 0 .207.04.486-.014 0 0 .048.275.178.588.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.586 1.791l.201-.239-.254-.302-.255.302.202.239c.003.12.01 1.175-.611 1.552 0 0 .207.04.485-.014a3.07 3.07 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.586-1.791l.201-.239-.254-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 
.207.04.485-.014 0 0 .049.274.179.588.13-.314.179-.588.179-.588.278.053.486.014.486.014-.622-.377-.614-1.431-.611-1.551zm-6.348 0l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.486-.014a3.12 3.12 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.551zm10.844 5.803l.201-.239-.254-.302-.254.302.202.239c.003.121.01 1.175-.612 1.551 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.551zm0 5.387l.201-.239-.254-.302-.254.302.202.239c.003.12.01 1.175-.612 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.552zm1.48-2.694l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.11 3.11 0 0 0 .178.589c.131-.314.179-.589.179-.589.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 0l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.09 3.09 0 0 0 .179.589c.131-.314.179-.589.179-.589.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm2.957 5.387l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.11 3.11 0 0 0 .178.588c.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 0l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.477-20.59l.201-.239-.254-.302-.254.302.202.239c.003.12.01 1.175-.612 1.552 0 0 .207.04.486-.014a3.12 3.12 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.552zm1.48-2.693l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014 0 0 .048.275.178.588.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 
0l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.13 3.13 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm2.957 5.387l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.1 3.1 0 0 0 .178.588c.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 0l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm11.221 13.502c.144.19.346.314.512.314a1.06 1.06 0 0 0 .223-.034.43.43 0 0 1 .11-.016c.04 0 .057.015.057.052a.56.56 0 0 1-.043.155c-.026.062-.043.112-.049.148l-.033.173-.083.148-.115.107-.033.034v.074.037c0 .202-.06.316-.223.425-.013.008-.033.022-.058.041l-.017.067-.082.288a1.73 1.73 0 0 1-.157.231c-.1.131-.165.237-.206.338-.034.083-.06.107-.114.107-.081 0-.162-.102-.176-.222a.42.42 0 0 1-.041.05l-.098.066-.158.107-.131.099-.091.091c-.016.016-.033.025-.047.025-.042 0-.085-.071-.126-.207l-.082-.271c-.022-.073-.034-.163-.034-.267 0-.137.009-.185.058-.295-.165.017-.212.027-.297.058a.29.29 0 0 1-.072.016c-.027 0-.044-.019-.044-.051a.51.51 0 0 1 .033-.122l.108-.28a.7.7 0 0 1 .566-.416.72.72 0 0 1 .211.044.45.45 0 0 0 .125.026c.096 0 .172-.07.172-.158 0-.044-.029-.091-.066-.108-.014-.007-.113-.037-.297-.091-.139-.041-.208-.092-.364-.273-.113-.131-.158-.168-.199-.168-.032 0-.049.021-.049.058l.008.11.003.051c0 .229-.068.448-.234.75l-.232.42c-.11.2-.157.352-.157.504 0 .166.072.313.165.339l.124.032a.09.09 0 0 1 .041.026c.011.013.026.034.026.047s-.015.046-.042.076c-.015.016-.064.08-.149.189-.142.186-.232.253-.405.305-.1.242-.263.383-.554.479l-.033.057c-.098.174-.317.331-.456.331h-.032c-.018.151-.126.308-.289.421l-.19.139a.34.34 0 0 0-.041.075c-.037.082-.072.116-.118.116-.088 
0-.161-.082-.212-.24l-.075.049-.157.107c-.038.018-.135.047-.289.083-.203.047-.378.133-.422.207l-.074.123-.034.014c-.04 0-.087-.057-.107-.13a2.03 2.03 0 0 1-.075-.464c0-.293.097-.484.397-.783l-.001-.071c0-.456.348-.765.86-.765l.083.002.281.017.026.001c.152 0 .292-.058.444-.183.202-.165.261-.266.261-.451 0-.059-.016-.126-.079-.333a1.34 1.34 0 0 1-.061-.388c0-.341.25-.765.59-.999.087-.061.152-.141.152-.187 0-.019-.01-.034-.023-.034-.022 0-.059.023-.105.064-.04.036-.107.078-.198.125l-.132.066-.149.058c-.046.017-.069.042-.069.073 0 .018.007.044.019.075s.016.047.016.061-.014.052-.042.096l-.115.19c-.113.186-.195.266-.339.33-.093.272-.226.41-.421.438-.156.314-.294.423-.57.454-.038.121-.105.221-.248.38-.059.064-.095.109-.107.132-.065.116-.07.123-.1.123-.038 0-.124-.057-.189-.123a.5.5 0 0 1-.074-.1l-.075-.132c-.024-.043-.049-.062-.08-.062l-.053.003-.182.025a.58.58 0 0 0-.331.19l-.099.149c-.01.015-.025.024-.039.024-.074 0-.119-.163-.119-.433 0-.237.036-.395.118-.508l.231-.322c.032-.045.05-.089.05-.126a.17.17 0 0 0-.041-.106c-.049-.058-.096-.093-.132-.099l-.174-.024c-.031-.005-.044-.012-.044-.026s.009-.034.027-.057a.79.79 0 0 1 .207-.182c.205-.125.339-.174.477-.174a.81.81 0 0 1 .432.142l.239.157c.034.023.074.034.115.034.111 0 .194-.066.272-.215.076-.144.109-.26.109-.383a.51.51 0 0 0-.53-.534c-.143 0-.29.056-.43.165-.042.033-.07.064-.083.091l-.066.149c-.015.036-.039.057-.059.057-.042 0-.135-.119-.213-.272-.041-.08-.08-.179-.115-.298-.049-.161-.071-.282-.071-.412l.012-.224a.83.83 0 0 1-.149-.511.92.92 0 0 1 .059-.29.61.61 0 0 1-.091-.368l.008-.103.025-.115.017-.099c-.08-.104-.102-.174-.182-.536-.044-.205-.071-.294-.106-.347-.017-.023-.025-.041-.025-.05 0-.048.115-.092.247-.092l.116.009c-.029-.076-.035-.106-.035-.166a1.03 1.03 0 0 1 .06-.329c.044-.126.053-.161.053-.202 0-.098-.015-.197-.036-.252l-.066-.165a.23.23 0 0 1-.021-.085c0-.017.011-.023.041-.023.162 0 .472.175.715.405s.32.413.32.752l-.014.181c.225.197.323.36.323.537 0 .129-.042.215-.166.337-.223.222-.273.337-.273.622 0 
.438.131.675.373.675.213 0 .377-.231.404-.57l.025-.314a3.22 3.22 0 0 1 .041-.297c.014-.075.025-.174.025-.215 0-.111-.073-.252-.29-.504-.232-.271-.31-.417-.31-.629 0-.254.176-.44.414-.44.059 0 .089.009.16.045.003-.144.01-.17.091-.372.064-.159.133-.272.165-.272.011 0 .028.012.049.033a.86.86 0 0 0 .108.09l.083.058a.75.75 0 0 1 .074.075c.051-.185.104-.272.256-.421.116-.113.159-.148.181-.148s.038.015.059.041l.099.131.14.158c.054.059.073.111.099.255l.082-.049.132-.074a.97.97 0 0 0 .1-.066c.022-.016.041-.026.052-.026s.029.024.039.059l.049.19.108.215c.032.064.042.114.042.212v.052l.055-.002c.163 0 .303.192.303.414 0 .294-.235.674-.54.876-.028.099-.035.139-.035.219 0 .657.173.794 1.183.945a5.56 5.56 0 0 1 .677.14l.653.215c.223.074.373.107.489.107.123 0 .191-.035.191-.1 0-.113-.165-.214-.672-.412l-.719-.28c-.731-.286-1.076-.731-1.076-1.388 0-.764.574-1.338 1.34-1.338.289 0 .57.06.976.207l.605.198c.101 0 .156-.069.171-.214.013-.124.047-.174.115-.174.101 0 .212.094.29.248.068.134.107.291.107.428a.47.47 0 0 1-.405.504.8.8 0 0 1-.665.342c-.329 0-.492-.065-1.054-.416-.201-.126-.287-.157-.434-.157-.197 0-.343.122-.343.287 0 .151.122.251.397.324.164.043.188.055.273.14.066-.016.096-.02.137-.02.268 0 .599.145.879.384.509.157.962.706.962 1.167 0 .509-.441.965-.935.965a3.37 3.37 0 0 1-.639-.102z" data-v-7c4b1471></path></g><g class="C" data-v-7c4b1471><path d="M22.437 23.219l.033-.091.033-.116c.006-.021.037-.068.091-.139.084-.11.126-.249.126-.411 0-.051-.013-.098-.044-.16-.016-.032-.025-.058-.025-.076 0-.011.006-.024.017-.038s.022-.023.033-.025l.005-.001c.021 0 .057.024.085.058.035.043.066.066.084.066.037 0 .076-.063.098-.157l.017-.083c0-.04-.033-.144-.074-.231-.016-.033-.024-.061-.024-.084 0-.029.019-.053.046-.053s.063.026.11.07c.023.022.045.034.063.034.052 0 .089-.053.128-.182l.033-.129c0-.024-.01-.038-.029-.038l-.029.002-.098.01c-.151 0-.364-.104-.464-.224l-.231-.281-.206-.273-.19-.124c-.031-.02-.049-.045-.049-.067 0-.031.031-.052.079-.052s.179.048.425.152a2.58 2.58 0 0 0 
.837.183c.395 0 .721-.319.721-.707 0-.212-.095-.408-.327-.681-.043-.051-.118-.101-.153-.101-.027 0-.046.02-.046.049a.26.26 0 0 0 .025.093c.01.026.016.052.016.076 0 .038-.014.06-.039.06-.052 0-.106-.085-.151-.234-.055-.184-.162-.294-.414-.421a.89.89 0 0 0-.261-.093c-.016 0-.027.014-.027.036 0 .031.012.065.033.098a.41.41 0 0 1 .067.158v.008c0 .032-.016.05-.046.05-.053 0-.065-.014-.186-.199-.062-.095-.228-.22-.372-.281-.045-.019-.093-.031-.14-.033h-.005c-.03 0-.053.014-.053.032 0 .007.013.033.037.076l.012.047c0 .025-.017.045-.039.045-.05 0-.203-.126-.324-.265-.131-.151-.215-.343-.215-.494 0-.316.302-.596.64-.596.085 0 .157.015.212.042l.289.149.339.224c.195.127.462.223.624.223.06 0 .106-.008.26-.051.081-.022.131-.054.131-.084l-.003-.007-.029-.017-.182-.016c-.101-.01-.33-.113-.512-.232l-.38-.247-.338-.141c-.026-.01-.042-.027-.042-.043s.018-.031.039-.031c0 0 .032.006.094.017l.314.091c.079.022.206.081.38.173.344.183.569.264.734.264.191 0 .331-.131.331-.309 0-.06-.013-.157-.032-.236a.33.33 0 0 0-.05-.107c-.014-.021-.03-.032-.041-.032-.024 0-.037.025-.037.067l.004.057.003.048c0 .152-.059.206-.228.206-.137 0-.322-.055-.742-.222-.394-.155-.64-.214-.884-.214-.622 0-1.167.507-1.167 1.085 0 .499.332.882 1.011 1.161l.702.289c.581.239.794.41.794.636 0 .188-.175.309-.448.309-.118 0-.265-.032-.536-.119-.657-.209-1.085-.296-1.461-.296-.613 0-1.049.168-1.398.536-.046.049-.073.063-.114.063-.028 0-.056-.011-.118-.047a.4.4 0 0 0-.181-.059c-.06 0-.154.052-.198.109l-.082.107c-.024.031-.051.05-.073.05-.06 0-.166-.102-.232-.224-.041-.074-.069-.147-.082-.215-.011-.052-.017-.137-.017-.226l.008-.153.025-.165.025-.141c.006-.02.022-.033.04-.033.043 0 .062.039.062.129l-.002.052-.002.037c0 .235.093.45.196.45.081 0 .152-.144.211-.43.012-.057.035-.09.063-.09s.053.042.078.165c.015.08.068.191.124.264.032.042.066.067.094.067.068 0 .111-.101.145-.339.014-.092.041-.15.073-.15.014 0 .026.007.035.018a1.3 1.3 0 0 1 .058.091c.02.034.052.062.091.082s.074.033.096.033a.12.12 0 0 0 
.077-.033c.037-.033.058-.058.058-.068s-.017-.038-.049-.073c-.054-.058-.096-.136-.149-.273L20 19.462c-.013-.03-.027-.05-.042-.058l-.026-.007-.049.007-.173.049-.054.009c-.023 0-.039-.015-.039-.039s.03-.062.068-.092c.209-.166.333-.445.38-.851.028-.247.06-.34.165-.487.022-.031.041-.049.052-.049.032 0 .05.025.05.073 0 .032-.009.072-.027.117a.29.29 0 0 0-.018.1l.001.031.017.05.091-.033.099-.115a.61.61 0 0 0 .109-.338c0-.079-.019-.134-.066-.19-.052-.062-.088-.087-.126-.087l-.056.004-.059.004c-.053 0-.08-.016-.08-.049l.014-.063.009-.091-.049-.206-.041-.1-.025-.083-.058.041-.124.074c-.032.019-.066.05-.1.091-.117.142-.125.149-.172.149-.03 0-.047-.011-.047-.031l.005-.035a.85.85 0 0 0 .025-.171c0-.144-.105-.361-.231-.481-.173.165-.265.346-.265.521v.099c0 .038-.012.057-.038.057h-.003c-.021-.003-.041-.019-.058-.041l-.099-.132a1.33 1.33 0 0 0-.133-.108l-.066-.057-.033.066c-.082.164-.11.262-.115.397-.004.11-.013.142-.039.142-.013 0-.03-.006-.052-.019-.125-.07-.137-.075-.191-.075-.149 0-.25.106-.25.263 0 .061.035.154.103.275.076.135.189.258.239.258.014 0 .017-.01.017-.051v-.017l-.008-.14-.017-.108-.001-.014c0-.032.012-.046.037-.046.075 0 .14.219.195.655.048.384.154.648.322.801.043.039.067.071.067.089s-.015.026-.033.026c-.024 0-.062-.015-.108-.041a.18.18 0 0 0-.09-.026c-.083 0-.175.096-.191.2l-.075.47-.058.661c-.011.125.006.293.05.495.038.178.058.309.058.391 0 .495-.308.886-.818 1.037l-.529.156c-.329.098-.491.315-.537.718.085-.067.111-.078.24-.098l.156-.025a.43.43 0 0 0 .141-.041c.015-.009.045-.04.091-.091l.14-.156c.053-.06.101-.094.13-.094.02 0 .035.022.035.048s-.02.078-.058.136-.06.135-.06.213c0 .105.028.172.135.324.013-.05.024-.083.033-.099a1.69 1.69 0 0 1 
.124-.173c.114-.148.197-.302.198-.372.002-.128.014-.169.046-.169s.033.006.045.079c.003.018.012.035.025.05.019.021.038.033.05.033s.033-.012.066-.033c.057-.038.101-.077.132-.116s.062-.093.082-.149c.028-.072.042-.125.042-.156l-.001-.107c0-.028.012-.051.027-.051s.062.043.089.091c.018.032.03.042.053.042h.005c.03-.003.056-.015.074-.034.101-.098.166-.219.166-.31l-.009-.152-.033-.125-.002-.011c0-.026.019-.048.04-.048.037 0 .078.053.135.174.026.057.051.084.075.084.039 0 .07-.029.148-.142l.074-.107c.016-.024.025-.052.025-.083s-.009-.057-.025-.081l-.067-.099c-.011-.017-.016-.045-.016-.083a.81.81 0 0 1 .124-.388l.107-.207c.013-.034.027-.05.046-.05s.028.02.028.041l-.008.091v.066.066c.011.03.031.042.071.042.025 0 .054-.006.086-.017l.206-.075a1.42 1.42 0 0 0 .19-.091c.324-.179.424-.216.583-.216.095 0 .218.037.392.117.036.017.065.026.085.026s.05-.015.072-.042c.016-.019.024-.042.024-.063 0-.03-.011-.066-.033-.102-.026-.046-.051-.103-.051-.123s.014-.036.038-.036.047.02.062.044l.19.33c.154.274.354.464.529.504l.363.083c.113.025.141.069.141.209a.72.72 0 0 1-.331.616l-.298.191c-.108.068-.198.259-.198.416 0 .102.025.282.058.425l.058-.057.141-.083.124-.124.066-.182.066-.165c.032-.08.07-.132.096-.132s.045.045.045.126l-.001.023v.031c0 .118.015.17.107.348z" data-v-7c4b1471></path><path d="M17.543 25.575c0-.393.143-.564.712-.853 1.123-.569 1.728-1.091 1.728-1.488 0-.08-.026-.197-.1-.445a1.41 1.41 0 0 1-.071-.358c0-.183.151-.46.344-.632l.157-.14c.086-.077.15-.188.15-.261l-.001-.037-.002-.049c0-.082.032-.112.117-.116l.075-.008.064-.017c.027 0 .053.015.069.041l.041.066.058.066c.019.021.038.034.053.034s.033-.008.063-.024c.04-.022.071-.031.1-.031.043 0 .065.023.065.071 0 .166-.107.406-.298.661-.317.429-.421.66-.421.934 0 .215.062.404.182.56.021.028.033.05.033.061 0 .022-.021.059-.058.104l-.05.067c-.066.095-.11.132-.153.132-.032 0-.063-.027-.078-.067l-.033-.09c-.01-.026-.028-.043-.047-.043-.006 0-.02.014-.036.034-.021.027-.032.059-.032.091v.14c0 .056-.02.114-.058.165a.69.69 0 0 
1-.116.123c-.031.027-.054.042-.065.042-.024 0-.047-.026-.059-.066l-.05-.165c-.007-.024-.024-.041-.041-.041-.028 0-.051.046-.058.116-.017.187-.049.301-.098.347l-.116.106c-.037.036-.081.061-.105.061s-.045-.011-.052-.027l-.058-.124c-.007-.015-.021-.025-.037-.025-.033 0-.058.066-.07.191-.03.297-.092.43-.256.545a.45.45 0 0 0-.206.231c-.072-.106-.1-.196-.1-.329a.3.3 0 0 1 .034-.157c.034-.062.053-.109.053-.137 0-.02-.013-.036-.03-.036-.008 0-.022.012-.041.032-.023.027-.059.055-.107.083-.149.085-.171.116-.207.281a.42.42 0 0 1-.38.371c-.28.057-.297.064-.397.158l-.01-.147zm-.534-8.514v.083l.001.122a.51.51 0 0 1-.018.134c-.027.115-.033.154-.033.187 0 .158.022.236.115.399.052.09.085.199.085.277 0 .033-.014.053-.036.053-.014 0-.029-.008-.04-.024l-.066-.091c-.053-.072-.103-.091-.237-.091l-.143.009a1.79 1.79 0 0 1 .099.289c.067.27.076.289.223.421s.161.15.161.198c0 .037-.026.067-.057.067l-.055-.008-.033-.003c-.045 0-.086.083-.086.175a.59.59 0 0 0 .053.208c.02.049.04.082.058.1l.083.082a.12.12 0 0 1 .033.085c0 .04-.023.074-.05.074a.19.19 0 0 1-.074-.027c-.021-.01-.04-.016-.058-.016-.037 0-.055.023-.055.068l.013.123.05.214c.011.048.042.102.091.158a.39.39 0 0 0 .124.099l.166.075c.016.012.024.036.024.066s-.024.058-.055.058a.7.7 0 0 1-.086-.017l-.084-.008c-.084 0-.09.005-.09.069a.52.52 0 0 0 .017.128l.033.132c.035.14.08.215.13.215.011 0 .032-.012.06-.033a1.02 1.02 0 0 1 .511-.173c.155 0 .332.019.399.041.038.013.065.02.079.02.02 0 .027-.02.028-.094l.017-.149.033-.256.024-.19v-.108l-.067.058c-.14.132-.229.173-.374.173-.38 0-.598-.427-.598-1.171 0-.307.045-.742.13-1.24.018-.105.026-.178.026-.229 0-.272-.172-.537-.472-.729z" data-v-7c4b1471></path><path d="M17.57 18.91l.074-.074.074-.067c.062-.054.111-.169.111-.254 0-.039-.008-.075-.028-.118-.029-.062-.084-.117-.117-.117-.028 0-.053.068-.064.175l-.017.124-.024.124-.009.124v.082zm-.726 3.377c.085.031.102.045.19.149.115.133.178.165.33.165a.42.42 0 0 0 .117-.016l.107-.033.083-.025c.04-.011.066-.028.066-.042s-.065-.072-.165-.139a1.59 1.59 0 0 
0-.198-.116c-.071-.035-.131-.051-.189-.051-.116 0-.228.036-.339.109zm1.113 2.323c0-.266.262-.485.582-.485.16 0 .253.032.253.086 0 .042-.07.1-.223.181l-.24.132-.364.181-.008-.096zm3.363-2.003c.117-.024.157-.054.215-.057l.149.008a.48.48 0 0 0 .099-.066l.116-.082c.165-.113.174-.12.174-.156 0-.046-.067-.068-.206-.068-.26 0-.419.122-.546.422z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M18.827 17.58l.116.074c.066.042.121.073.165.09.128.053.174.071.174.12a.35.35 0 0 1-.017.094.51.51 0 0 0-.025.116c-.012.112-.08.141-.125.141-.056 0-.114-.086-.206-.248a1.91 1.91 0 0 1-.132-.33l-.005-.032c0-.02.012-.032.036-.032l.018.006z" data-v-7c4b1471></path><path d="M19.942 17.868l-.19.191c-.067.089-.067.167-.083.182s-.032.021-.058.025l-.012.001c-.048 0-.084-.035-.087-.083l-.008-.14-.009-.141v-.008c0-.054.03-.083.073-.092l.176-.049.181-.091c.053-.026.069-.033.078-.033.021 0 .039.022.039.048 0 .012-.006.03-.018.052l-.083.139z" data-v-7c4b1471></path><path d="M19.105 18.431l.124.092a.22.22 0 0 0 .111.028.51.51 0 0 0 .195-.053l.108-.049c.025-.011.047-.018.064-.018.066 0 .118.07.118.157 0 .122-.088.247-.216.429l-.09.191c-.072.103-.12.14-.179.14-.098 0-.146-.056-.293-.348-.106-.208-.149-.336-.149-.438 0-.087.045-.148.111-.148.027 0 .06-.006.096.016z" data-v-7c4b1471></path></g><path d="M19.216 18.884l.008.157c.004.071.063.142.12.142.076 0 .131-.115.16-.29l.019-.115c0-.061-.02-.077-.098-.077l-.044.002h-.058l-.037-.002c-.052 0-.072.04-.072.145l.002.038z" class="C" data-v-7c4b1471></path><path d="M8.359 21.057c.072.182.097.272.107.389l.016.346c.013.347.128.52.347.52.079 0 .145-.021.182-.058l.091-.091.036-.011c.058 0 .09.038.09.107l-.01.134c-.007.06.004.188.033.379a2.25 2.25 0 0 1 .025.333c0 .176-.013.244-.075.402.058.162.091.345.091.499 0 .18-.029.3-.099.402.033.142.041.199.041.28s-.011.187-.033.314c-.029.172-.041.305-.033.395l.001.021c0 .052-.016.078-.048.078s-.069-.024-.102-.066c-.05-.065-.083-.088-.121-.088-.057 
0-.075.03-.16.278-.026.077-.093.18-.198.306-.082.098-.133.179-.149.24l-.049.173c-.01.022-.029.039-.05.041h-.01c-.136 0-.292-.292-.452-.842-.045-.156-.103-.225-.187-.225-.06 0-.121.04-.144.093l-.05.116c-.009.02-.026.033-.043.033-.066 0-.167-.121-.229-.273-.051-.126-.081-.263-.081-.38 0-.259.191-.452.766-.776.251-.141.348-.32.348-.643 0-.256-.077-.403-.297-.57-.175-.132-.275-.182-.368-.182-.078 0-.14.052-.14.118 0 .078.045.125.153.162.127.046.182.09.182.147s-.054.165-.141.299a.76.76 0 0 1-.264.273c-.039.227-.146.372-.397.537-.05.142-.078.193-.165.305l-.174.223a.72.72 0 0 0-.124.256c-.018.126-.03.148-.078.148-.066 0-.2-.116-.335-.289a.4.4 0 0 1-.107-.273c-.086.068-.113.077-.281.099-.101.014-.176.033-.223.059-.127.066-.168.084-.191.084-.034 0-.046-.018-.049-.076l-.008-.182c0-.249.098-.54.248-.734-.145-.125-.289-.202-.405-.215-.049-.005-.062-.011-.062-.029 0-.03.032-.095.078-.152.062-.078.111-.133.149-.165a.82.82 0 0 1 .479-.174c.107 0 .266.039.471.116.074.028.14.041.197.041.091 0 .142-.059.142-.163 0-.15-.074-.385-.241-.762-.112-.255-.163-.427-.163-.545 0-.286.135-.583.304-.668-.08-.04-.115-.05-.184-.05-.083 0-.148.021-.254.083-.122.071-.19.134-.19.176 0 .016.025.042.067.072.064.045.075.061.075.122 0 .204-.169.477-.405.654-.052.289-.185.464-.446.586-.034.157-.056.203-.182.38-.216.302-.266.399-.305.57-.02.089-.041.125-.068.125-.035 0-.108-.055-.188-.142l-.132-.158c-.035-.054-.053-.107-.091-.263-.06.017-.076.02-.118.02l-.121-.003-.083-.002c-.181 0-.373.041-.471.101-.036.022-.06.033-.072.033-.053 0-.097-.112-.097-.25 0-.298.083-.545.276-.814-.071-.059-.094-.081-.157-.148-.027-.03-.05-.05-.066-.058l-.091-.05c-.031-.017-.05-.037-.05-.054 0-.044.121-.159.264-.252.159-.102.285-.149.4-.149.135 0 .207.035.376.182.117.102.197.149.254.149.04 0 .084-.028.118-.075.028-.038.043-.08.043-.123 0-.158-.189-.424-.398-.562-.156-.102-.326-.157-.492-.157-.23 0-.428.114-.515.297l-.091.19c-.021.044-.048.067-.079.067-.053 0-.081-.024-.268-.241l-.214-.248a.66.66 0 0 1-.133-.214.49.49 0 0 
0-.041-.107c-.007-.011-.036-.033-.082-.067-.166-.117-.287-.349-.356-.685l-.025-.107c-.202-.248-.323-.555-.323-.821a.36.36 0 0 1 .018-.113c-.118-.118-.131-.147-.224-.463-.066-.224-.108-.32-.19-.428-.022-.029-.033-.052-.033-.064 0-.045.069-.081.19-.101l.166-.019.165.011-.01-.226c0-.133.01-.219.044-.377a1.01 1.01 0 0 0 .025-.172 1.68 1.68 0 0 0-.017-.142.73.73 0 0 0-.024-.124c-.012-.031-.018-.055-.018-.07 0-.027.018-.041.049-.041.024 0 .058.012.109.036.144.071.263.143.355.215.285.223.509.566.509.779l-.004.065-.017.121c0 .036.02.084.058.134l.091.125c.081.11.133.238.133.324 0 .077-.016.111-.149.304-.1.144-.141.252-.141.369 0 .149.059.337.166.522.097.171.21.265.317.265.184 0 .265-.163.294-.604l.066-.627.008-.113c0-.052-.016-.082-.082-.161l-.249-.288c-.236-.276-.307-.418-.307-.616 0-.227.149-.384.365-.384l.099.009-.001-.064a.87.87 0 0 1 .042-.224c.052-.177.117-.314.151-.314.011 0 .038.013.08.041a1.22 1.22 0 0 0 .149.057.99.99 0 0 1 .164.082.73.73 0 0 1 .133-.289c.107-.138.228-.247.271-.247.026 0 .074.035.134.099l.157.165c.026.027.057.081.091.157l.033.074.099-.057.132-.075.157-.075c.042-.028.072-.043.085-.043.028 0 .043.05.105.323a1.9 1.9 0 0 1 .058.364c.264.037.388.188.388.471 0 .266-.131.531-.413.832-.077.083-.096.122-.096.199 0 .73.254.96 1.17 1.057a5.83 5.83 0 0 1 1.404.298c.355.106.559.15.691.15.156 0 .243-.057.243-.157 0-.153-.111-.201-.769-.34-1.531-.322-2.132-.845-2.132-1.854 0-.719.538-1.259 1.257-1.259.317 0 .6.097 1.048.355.416.241.502.282.597.282.111 0 .155-.071.155-.25 0-.116.035-.172.107-.172.066 0 .166.068.256.174.137.162.19.245.19.469 0 .299-.159.494-.471.58l-.074.082c-.17.192-.32.256-.59.256-.235 0-.53-.096-.732-.24l-.397-.28c-.171-.121-.296-.173-.416-.173-.169 0-.27.101-.27.27s.11.301.29.357l.372.116c.132.041.194.067.355.148l.185-.018a1.57 1.57 0 0 1 .699.174l.149.067.207.058c.469.131.851.656.851 1.168 0 .584-.379.946-.991.946l-.191-.009z" class="B" data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M8.218 25.961l.049-.1.116-.124a.61.61 0 0 0 
.15-.427c0-.085-.015-.177-.043-.267a.67.67 0 0 1-.034-.146c0-.052.039-.102.079-.102.016 0 .039.015.062.042l.124.14c.077.087.103.103.199.124l.008-.115.008-.136a.96.96 0 0 0-.182-.558c-.066-.084-.096-.135-.096-.164s.022-.059.045-.059c.008 0 .02.006.034.017.08.059.121.084.143.084.04 0 .076-.1.076-.208a.77.77 0 0 0-.169-.461l-.099-.123c-.011-.016-.017-.032-.017-.047 0-.035.029-.061.072-.061.031 0 .05.008.102.041.024.016.047.025.063.025.064 0 .088-.09.088-.336l-.003-.102-.008-.149c-.009-.175-.025-.206-.099-.206H8.82c-.403 0-.591-.255-.695-.942-.075-.496-.26-.849-.487-.925-.136-.045-.184-.075-.184-.111 0-.023.038-.045.079-.045.03 0 .099.018.156.041.329.133.607.198.849.198.396 0 .708-.291.708-.66 0-.282-.134-.569-.376-.801a.47.47 0 0 0-.206-.124l-.016-.001-.042.014.017.119.032.083.009.034c0 .018-.017.032-.039.032s-.047-.013-.061-.034l-.149-.223c-.118-.176-.376-.323-.568-.323-.068 0-.111.03-.111.078 0 .023.022.066.05.097.042.048.075.105.075.132l-.009.033c-.009.019-.021.031-.033.033l-.008.001c-.04 0-.06-.018-.24-.223-.147-.168-.448-.332-.609-.332-.023 0-.044.01-.061.026-.01.011-.017.023-.017.032 0 .025.022.058.058.092.051.047.092.103.092.127 0 .016-.022.031-.046.031-.047 0-.153-.068-.351-.224-.308-.242-.446-.47-.446-.735 0-.319.228-.57.52-.57.171 0 .398.082.554.199l.405.305a1.07 1.07 0 0 0 .65.215c.052 0 .1-.012.143-.033.036-.018.058-.04.058-.055s-.031-.024-.075-.036c-.146-.035-.264-.085-.347-.148l-.413-.313-.323-.223c-.218-.114-.26-.141-.26-.167 0-.013.021-.024.047-.024s.047.006.073.017l.215.091a2.47 2.47 0 0 1 .347.206c.442.295.667.39.927.39.247 0 .429-.175.429-.413 0-.047-.012-.098-.034-.143l-.041-.082c-.014-.029-.041-.052-.049-.041l-.025.033-.025.174c-.001.007-.01.027-.025.058a.29.29 0 0 1-.057.082c-.045.044-.083.058-.152.058-.128 0-.271-.068-.658-.315s-.744-.379-1.024-.379A1.07 1.07 0 0 0 5.977 18c0 .827.505 1.2 2.09 1.546.603.131.868.308.868.576 0 .245-.198.399-.512.399a1.19 1.19 0 0 1-.224-.016 30.59 30.59 0 0 1-.851-.232c-.354-.098-.752-.154-1.103-.154-.622 
0-1.041.185-1.326.584-.033.047-.066.075-.088.075s-.042-.012-.069-.033l-.115-.091a.56.56 0 0 0-.297-.113.26.26 0 0 0-.175.072c-.041.037-.072.057-.084.057-.046 0-.131-.104-.196-.239-.093-.193-.141-.386-.141-.562 0-.082.026-.14.062-.14.029 0 .05.022.062.066l.066.239c.037.134.112.239.169.239.025 0 .07-.036.12-.099.031-.038.053-.077.067-.116.052-.151.064-.173.105-.173.048 0 .073.03.106.181.007.028.016.07.054.142.033.062.067.113.099.149.025.027.056.042.085.042.073 0 .139-.121.139-.255l-.002-.035-.01-.153c0-.059.009-.077.043-.077.019 0 .034.01.058.04.035.046.083.085.141.115.05.027.091.041.118.041s.053-.015.08-.041.035-.044.035-.065-.03-.062-.117-.15c-.047-.048-.103-.16-.165-.33-.023-.065-.054-.109-.078-.109a.24.24 0 0 0-.054.018l-.107.041-.074.033-.028.006c-.035 0-.063-.025-.063-.057s.075-.121.166-.196c.065-.055.105-.136.107-.223.005-.139.031-.248.058-.248l.025-.289a2.98 2.98 0 0 1 .124-.569c.01-.03.023-.051.033-.051.023 0 .044.062.044.131l-.003.043-.005.084c0 .105.017.159.049.159s.077-.052.113-.143l.074-.19c.022-.056.033-.108.033-.154 0-.161-.096-.277-.23-.277l-.06.01-.058.017c-.043 0-.08-.048-.08-.104l.006-.119-.008-.165-.033-.173-.009-.083-.074.041-.091.075-.132.107-.108.091c-.019.016-.04.025-.061.025-.063 0-.071-.017-.071-.146v-.032c0-.104-.045-.185-.214-.384l-.042-.05-.032.058-.091.149-.091.231-.017.148c-.004.043-.021.076-.037.076s-.046-.022-.078-.059a.64.64 0 0 0-.355-.239l-.016.057-.05.174-.009.156v.066.053c0 .046-.017.073-.045.073l-.005-.001c-.003-.001-.019-.015-.049-.041-.019-.017-.054-.026-.1-.026-.154 0-.252.083-.252.215 0 .099.034.173.186.404.068.103.146.184.176.184.02 0 .037-.054.037-.117l-.015-.099a.41.41 0 0 1-.024-.126c0-.046.013-.071.039-.071.074 0 .151.214.183.512l.033.305a1.02 1.02 0 0 0 .099.33c.075.154.149.259.215.305.079.056.128.111.128.141s-.021.05-.044.05a5.64 5.64 0 0 1-.167-.059l-.069-.01c-.165 0-.22.094-.22.378a1.58 1.58 0 0 0 .488 1.044l.38.306c.193.154.339.411.339.595 0 .242-.265.479-.719.644l-.273.098c-.212.077-.297.214-.38.611a.68.68 0 0 1 
.228-.042l.037.001.123.003c.111 0 .191-.067.191-.16v-.075c0-.022.031-.053.074-.075l.115-.058.091-.074c.014-.012.03-.019.046-.019.025 0 .045.024.045.051 0 .02-.006.046-.017.075-.028.073-.044.173-.044.284 0 .202.032.311.135.459.009-.055.018-.09.025-.107.017-.042.064-.117.141-.223.125-.175.217-.388.217-.503l-.003-.035-.025-.189c0-.034.015-.067.041-.091.018-.016.038-.024.057-.024.035 0 .057.021.085.082.019.046.043.074.059.074.029 0 .086-.076.122-.165.028-.066.041-.125.041-.175a.29.29 0 0 0-.025-.106.29.29 0 0 1-.026-.094c0-.021.007-.031.034-.055.014-.01.028-.017.042-.017s.033.007.049.017l.074.05c.025.017.048.025.066.025.033 0 .081-.037.116-.092.072-.11.117-.201.117-.235 0-.022-.014-.038-.059-.07-.164-.116-.273-.292-.273-.442 0-.035.008-.074.025-.119l.082-.223.066-.174.024-.009c.028 0 .045.023.045.064a.28.28 0 0 1-.019.094.54.54 0 0 0-.035.157c0 .059.03.091.086.091a.35.35 0 0 0 .138-.033c.25-.108.448-.166.567-.166.112 0 .182.025.3.108.031.022.058.034.076.034s.032-.013.048-.034c.01-.014.017-.029.017-.041 0-.03-.018-.069-.05-.107-.042-.052-.059-.085-.059-.117 0-.026.017-.043.044-.043.13 0 .333.41.527 1.059.152.513.195.565.685.843.415.234.573.466.573.843 0 .371-.103.846-.317 1.469-.067.197-.097.33-.097.44 0 .087.017.172.081.394l.057.19a.98.98 0 0 0 .065.124zm-1.916-1.156a1.38 1.38 0 0 1 .157-.281c.134-.201.182-.308.182-.404v-.133c0-.124.022-.182.072-.182a.05.05 0 0 1 .044.025l.042.066c.013.021.035.033.058.033s.069-.032.093-.069a.58.58 0 0 0 .096-.295l-.009-.091-.008-.093c0-.05.02-.072.068-.072.032 0 .046.013.073.075.017.04.042.066.062.066s.049-.018.069-.05l.066-.099.05-.075.012-.04c0-.023-.018-.044-.062-.076-.153-.11-.22-.28-.231-.587a2.62 2.62 0 0 0-.041-.454c-.005-.016-.054-.134-.149-.355a2.15 2.15 0 0 1-.099-.297c-.019-.076-.043-.1-.103-.1-.225 0-.394.192-.394.445 0 .149.062.384.175.67.159.401.201.544.201.686 0 
.303-.121.435-.54.586-.473.169-.695.413-.777.85l.083-.033.115-.041.132-.033c.114-.028.205-.092.223-.157l.033-.115c.008-.028.024-.047.05-.058l.074-.033.074-.033.016.033-.033.107a.74.74 0 0 0-.017.192c0 .181.036.287.141.418zm-4.437-7.639l.017.093c0 .026-.011.097-.033.213l-.015.168a1.35 1.35 0 0 0 .072.435c.042.119.053.156.053.188l-.003.035c-.008.034-.02.059-.032.059s-.046-.018-.084-.052c-.095-.082-.198-.125-.305-.125a.81.81 0 0 0-.141.017l.041.091.124.306.174.264.181.125a.2.2 0 0 1 .05.049c.017.023.025.042.025.055s-.014.027-.033.028l-.166.008c-.025.001-.045.017-.045.035 0 .028.024.111.071.246l.083.24.14.132.148.108c.044.031.07.067.07.093s-.014.046-.029.047l-.082.008c-.027.003-.042.026-.042.066 0 .062.058.174.166.322.128.174.218.232.364.232h.124c.059 0 .095.029.095.076 0 .039-.025.061-.079.073-.114.024-.149.047-.149.1 0 .021.015.053.042.09l.067.091c.055.075.117.125.157.125.022 0 .074-.023.121-.076.074-.085.159-.154.268-.181.287-.073.347-.103.347-.169 0-.023-.011-.056-.033-.095-.091-.164-.124-.27-.124-.404 0-.103-.027-.175-.064-.175-.015 0-.041.025-.068.067-.068.104-.184.174-.289.174-.286 0-.631-.448-.801-1.041a1.65 1.65 0 0 1-.061-.445 2.32 2.32 0 0 1 .061-.504l.091-.405.011-.112c0-.205-.189-.468-.481-.673z" data-v-7c4b1471></path><path d="M2.46 18.315c-.061.115-.117.374-.117.538 0 .096.031.173.072.173.013 0 .03-.016.045-.041l.091-.166.059-.101c.033-.049.057-.097.057-.123 0-.098-.091-.219-.207-.28zm.907 3.723l.041.041.066.066c.161.167.185.183.263.183.072 0 .222-.041.307-.084.036-.018.058-.04.058-.057 0-.011-.03-.044-.082-.091l-.091-.083c-.069-.062-.171-.12-.22-.12-.095 0-.192.042-.342.145zm1.918 1.495a.82.82 0 0 1 .165.083l.132.082c.025.011.07.017.131.017.038 0 .072-.006.1-.017l.174-.066.165-.058c.044-.016.075-.041.075-.064 0-.012-.037-.03-.091-.043-.037-.01-.079-.023-.124-.041-.203-.082-.233-.094-.305-.094a.55.55 0 0 0-.422.201zm2.092 1.66c.055-.133.093-.155.281-.166.212-.012.414-.391.414-.648 
0-.025-.009-.047-.019-.047s-.03.01-.056.027l-.123.074-.24.107c-.16.071-.301.285-.301.453a.57.57 0 0 0 .045.2z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M3.807 18.018l.082.116a.78.78 0 0 0 .179.129c.025 0 .038-.018.053-.071l.033-.125.009-.138c0-.109-.048-.15-.273-.208-.056-.015-.079-.013-.115-.042s-.079-.05-.099-.05-.034.01-.034.023l.009.037.041.123.116.207zm.883-.001l-.108.191-.041.045-.008.001c-.044 0-.088-.1-.091-.204l-.016-.164c.013-.068.053-.098.156-.116.084-.015.14-.031.166-.049l.132-.091.014-.003c.022 0 .038.015.038.04l-.003.011-.058.141-.182.197z" data-v-7c4b1471></path><path d="M4.474 18.579l.132-.066c.024-.012.045-.018.063-.018.064 0 .112.048.112.134 0 .094-.074.221-.175.403-.109.197-.191.331-.276.331-.119 0-.287-.189-.451-.505a.54.54 0 0 1-.063-.23c0-.104.046-.159.133-.159.036 0 .07.008.095.026l.074.05a.39.39 0 0 0 .21.066c.052 0 .101-.011.145-.033z" data-v-7c4b1471></path></g><path d="M4.384 18.761s.076.016.076.067l-.002.024-.025.123c-.027.165-.065.267-.119.267a.08.08 0 0 1-.063-.035c-.085-.12-.15-.274-.15-.358 0-.059.026-.089.079-.089l.014.001.091.016.099-.016z" class="C" data-v-7c4b1471></path><path d="M23.617 5.468l.033.124.067.264c.104.472.191.595.421.595.131 0 .24-.073.273-.182l.049-.166c.011-.038.04-.06.079-.06.09 0 .138.099.145.299.002.076.019.211.049.404.023.139.033.267.033.382 0 .084-.01.133-.049.237l.016.099c.028.163.035.227.035.303 0 .354-.017.459-.101.597.034.135.044.199.044.296a2.47 2.47 0 0 1-.027.291l-.033.248.008.148c.022.055.033.089.033.099 0 .023-.015.041-.033.041l-.165-.066-.182-.099c-.059-.032-.1-.064-.19-.148-.16.141-.183.154-.373.206-.274.076-.375.11-.429.149s-.093.057-.114.057c-.064 0-.109-.124-.109-.306 0-.264.015-.401.058-.504-.161-.054-.242-.074-.3-.074a.69.69 0 0 0-.121.016l-.037.004c-.024 0-.038-.029-.038-.079 
0-.081.057-.242.133-.378.103-.184.274-.338.405-.364l.297-.057c.176-.034.281-.128.281-.251s-.077-.197-.257-.261l-.347-.124c-.117-.042-.227-.094-.331-.157l-.181-.108c-.03-.016-.062-.024-.09-.024-.073 0-.118.054-.118.141 0 .14.108.255.24.255.03 0 .058-.006.084-.017a.27.27 0 0 1 .076-.025c.038 0 .065.036.065.089a1.04 1.04 0 0 1-.19.522l-.099.124-.075.099c-.027.213-.236.517-.446.652-.051.032-.082.054-.091.066s-.029.047-.066.116c-.017.032-.073.098-.166.198a1.55 1.55 0 0 1-.206.19c-.092.07-.148.121-.165.149l-.083.141c-.017.029-.038.043-.065.043-.074 0-.142-.077-.183-.207-.03-.097-.057-.125-.12-.125a.64.64 0 0 0-.145.025l-.115.016h-.29c-.156 0-.365.038-.463.084-.037.018-.066.026-.085.026-.043 0-.064-.043-.064-.126 0-.262.202-.659.464-.916-.039-.152-.119-.254-.223-.28l-.149-.041c-.015-.008-.025-.023-.025-.039 0-.053.059-.105.19-.167.235-.111.303-.132.434-.132.15 0 .276.051.433.174.224.174.285.207.393.207.176 0 .345-.161.345-.331 0-.114-.063-.241-.218-.438-.22-.281-.298-.464-.298-.702 0-.16.032-.247.174-.478.022-.037.035-.068.035-.089 0-.045-.075-.086-.159-.086-.088 0-.198.032-.347.1-.249.114-.397.22-.397.285 0 .026.015.052.041.07l.083.058c.01.007.017.02.017.034s-.016.043-.034.064-.072.09-.165.215a1.89 1.89 0 0 1-.346.363c-.045.034-.072.062-.084.083l-.066.124c-.055.103-.218.274-.356.371l-.115.091a.75.75 0 0 0-.075.115c-.084.151-.217.338-.305.429l-.223.231c-.064.066-.154.257-.24.511-.068.2-.111.29-.142.29-.014 0-.037-.018-.064-.05-.16-.183-.305-.298-.375-.298-.018 0-.051.015-.096.042-.063.038-.138.067-.223.083l-.297.057c-.249.049-.348.102-.463.248-.029.036-.053.057-.066.057-.029 0-.051-.043-.051-.1l.008-.072c.002-.008.016-.082.042-.223.043-.235.103-.43.165-.537l.149-.256c.026-.046.041-.093.041-.133 0-.19-.178-.345-.447-.387-.055-.008-.083-.028-.083-.06 0-.038.065-.111.166-.188.175-.132.368-.215.505-.215.092 0 .215.032.379.099l.182.074a.49.49 0 0 0 .184.043c.192 0 .379-.175.609-.57.104-.18.149-.308.149-.425 0-.209-.097-.335-.26-.335-.128 
0-.25.054-.351.156-.09.091-.137.182-.137.264l.005.084.003.044c0 .067-.023.097-.072.097-.062 0-.135-.061-.278-.232l-.232-.272c-.251-.294-.303-.411-.314-.711-.28-.357-.35-.555-.35-.762l.011-.17c-.201-.241-.24-.334-.24-.572 0-.107.006-.135.05-.221-.099-.145-.13-.21-.157-.322l-.041-.174a.9.9 0 0 0-.132-.305c-.074-.106-.083-.122-.083-.153 0-.086.083-.142.29-.193l.066-.017-.017-.183c0-.048.006-.094.025-.197l.058-.306c.022-.119.034-.21.034-.271 0-.07-.011-.127-.05-.257-.025-.084-.054-.148-.082-.19s-.05-.084-.05-.109.028-.05.066-.05c.02 0 .055.01.083.027.279.152.584.393.801.636.187.208.247.381.289.843.253.188.348.358.348.626 0 .159-.052.313-.166.488l-.165.256c-.049.083-.075.179-.075.279 0 .149.045.289.157.488.123.219.234.314.365.314.217 0 .413-.374.413-.786l-.002-.081v-.223l.025-.198.016-.24.016-.199.006-.11c0-.081-.017-.105-.271-.376-.28-.3-.413-.554-.413-.789 0-.222.191-.408.42-.408a.6.6 0 0 1 .175.032v-.099l.009-.174.049-.214c.04-.171.058-.203.117-.203l.041.013.149.132.181.116.05.049c.012-.103.021-.124.107-.232l.115-.157.083-.124.05-.074c.033-.05.047-.063.069-.063s.038.029.064.121c.011.037.036.075.074.107l.108.091c.087.074.145.183.181.339a.49.49 0 0 1 .165-.149l.133-.074c.049-.028.079-.042.088-.042s.023.02.037.05c.019.043.041.085.066.124.106.171.116.196.116.311v.069.099c.268.074.372.218.372.514 0 .222-.102.451-.315.707-.141.17-.165.222-.165.35 0 .582.439.935 1.181.946l.545.008a2.67 2.67 0 0 1 .901.148c.382.125.507.158.623.158.084 0 .154-.047.154-.103 0-.119-.2-.212-.868-.401-1.361-.384-1.942-.935-1.942-1.836 0-.698.583-1.237 1.336-1.237a2.88 2.88 0 0 1 .82.141l.338.124a.52.52 0 0 0 .178.035c.117 0 .161-.073.178-.291.008-.097.039-.14.101-.14.087 0 .256.136.378.306s.215.404.215.554c0 .176-.092.31-.29.421-.185.282-.413.412-.72.412-.235 0-.635-.159-1.139-.454-.198-.116-.285-.148-.395-.148-.149 0-.244.083-.244.214 0 .119.088.194.334.28.391.139.475.185.587.323.421.073.604.146.868.346.754.242 1.174.707 1.174 1.303 0 .543-.368.935-.88.935a1.59 1.59 0 0 1-.402-.049z" class="B" 
data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M24.436 9.084a.58.58 0 0 1-.026-.152l.009-.112.026-.321c0-.061-.018-.109-.06-.166l-.099-.132c-.064-.087-.09-.151-.09-.225 0-.048.011-.064.044-.064l.038.008.074.041c.034.019.062.028.083.028.05 0 .075-.077.075-.232 0-.215-.04-.318-.165-.415l-.115-.091c-.02-.016-.034-.043-.034-.067 0-.04.023-.065.061-.065.023 0 .056.007.096.017l.091.025.035.008c.028 0 .041-.022.041-.07l-.002-.029-.025-.174c-.026-.18-.056-.249-.109-.249l-.015.002-.215.041-.036.003c-.258 0-.562-.245-.641-.515l-.157-.537c-.047-.161-.187-.338-.397-.504-.069-.053-.129-.085-.173-.09l-.148-.017c-.027-.003-.045-.019-.045-.04 0-.034.029-.05.09-.05a2.35 2.35 0 0 1 .732.14c.271.089.419.121.552.121.414 0 .663-.216.663-.573 0-.179-.065-.404-.157-.548-.081-.124-.216-.251-.389-.363-.025-.017-.048-.026-.065-.026-.023 0-.042.018-.042.042l.008.107v.01c0 .04-.023.073-.053.073-.046 0-.093-.054-.146-.165-.152-.323-.398-.517-.65-.517-.055 0-.087.019-.087.053 0 .025.015.063.043.109a1.02 1.02 0 0 1 .074.148c.013.032.02.056.02.071s-.013.024-.038.024c-.037 0-.098-.034-.139-.077l-.124-.132-.107-.166a.79.79 0 0 0-.33-.289l-.207-.107c-.041-.022-.078-.033-.105-.033s-.039.013-.039.038.013.059.037.095a.42.42 0 0 1 .076.165c0 .028-.018.05-.041.05-.058 0-.162-.104-.307-.305a.78.78 0 0 1-.158-.464c0-.348.268-.619.613-.619.177 0 .399.074.618.207l.397.24c.142.086.334.149.453.149.066 0 .085-.004.233-.042a.4.4 0 0 0 .107-.041.06.06 0 0 0 .03-.049c0-.032-.02-.047-.072-.05-.184-.014-.566-.164-.817-.323l-.273-.156c-.068-.024-.114-.059-.114-.088 0-.013.014-.02.036-.02l.048.008.36.132.347.124a2.18 2.18 0 0 0 .555.116c.157 0 .265-.108.265-.265 0-.079-.02-.168-.052-.23-.05-.099-.116-.182-.143-.182s-.05.033-.05.088l.004.044.01.106c0 .111-.061.184-.156.184a.46.46 0 0 1-.135-.026l-.537-.173c-.467-.151-.648-.19-.878-.19-.649 0-1.139.438-1.139 1.022 0 .658.452 1.071 1.612 1.472 1.125.39 1.272.479 1.272.775 0 .17-.16.307-.359.307a1 1 0 0 
1-.194-.017l-.562-.132c-.345-.084-.637-.116-1.027-.116-.819 0-1.214.125-1.642.521-.099.092-.139.109-.249.109l-.056-.002-.13-.008c-.099 0-.174.047-.234.149l-.132.223c-.018.031-.054.049-.095.049-.086 0-.131-.057-.212-.256a.64.64 0 0 0-.165-.249l-.174-.149c-.026-.023-.041-.044-.041-.06s.015-.044.041-.08a.84.84 0 0 0 .075-.115c.026-.047.039-.081.041-.1l.008-.074c.006-.024.03-.041.059-.041.055 0 .064.036.066.24a.59.59 0 0 0 .066.223l.091.198c.012.026.041.042.075.042.086 0 .194-.201.223-.413.024-.176.043-.232.079-.232s.049.021.119.166c.022.044.056.087.099.124s.075.054.099.058h.009c.068 0 .091-.043.107-.199l.008-.082-.016-.116-.011-.08c0-.053.018-.085.049-.085.018 0 .04.012.062.033l.082.083c.055.055.111.083.166.083.071 0 .115-.031.115-.081 0-.041-.009-.061-.082-.183a2.51 2.51 0 0 1-.058-.107c-.073-.139-.108-.174-.173-.174l-.084.008-.107.041c-.032.012-.054.019-.066.019s-.025-.014-.025-.033c0-.074.049-.192.141-.336.057-.09.104-.311.14-.649l.042-.388c.025-.238.051-.323.099-.323.017 0 .031.007.033.017l.024.124v.099c0 .12.013.218.028.218.035 0 .087-.041.137-.111.036-.05.063-.106.083-.165.027-.089.042-.161.042-.217 0-.191-.081-.273-.339-.345-.026-.007-.043-.016-.049-.025l-.009-.074v-.14l-.017-.1-.032-.083-.042-.091-.066.041c-.169.106-.21.138-.265.206-.075.095-.101.119-.132.119-.023 0-.041-.016-.041-.036l.033-.141.003-.024c0-.181-.102-.356-.284-.487l-.058.066-.058.083-.099.116a.39.39 0 0 0-.092.245l.001.027.001.028c0 .065-.01.088-.041.088l-.018-.008-.066-.082-.082-.099-.149-.091-.074-.058c-.08.173-.083.191-.083.414v.069c0 .074-.024.119-.064.119-.019 0-.054-.014-.101-.041-.037-.021-.082-.034-.123-.034-.12 0-.209.102-.209.241 0 .151.032.255.133.42.065.107.15.194.19.194.005 0 .015-.076.025-.177l-.025-.14-.005-.063c0-.053.019-.086.05-.086.046 0 .075.051.104.19l.066.306.066.388a1.54 1.54 0 0 0 .099.33 1.66 1.66 0 0 0 .14.289c.092.118.102.134.102.16 0 .02-.017.038-.036.038s-.043-.009-.075-.025a.32.32 0 0 0-.099-.033l-.06-.009c-.137 0-.165.11-.165.658 0 .342.071.798.159 
1.019l.174.438a.56.56 0 0 1 .042.205c0 .604-.817 1.569-1.678 1.983-.335.161-.434.328-.463.784a1.57 1.57 0 0 1 .289-.116c.244-.068.296-.103.33-.215l.033-.107c.015-.046.045-.068.215-.157.064-.033.109-.07.132-.107.043-.071.076-.106.098-.106s.036.016.036.043l-.002.013-.091.236-.01.058c0 .173.097.317.275.408l.024-.099c.029-.126.058-.209.083-.248l.158-.231c.057-.084.067-.139.067-.352l-.009-.26-.005-.066c0-.043.02-.066.058-.066.029 0 .053.011.055.025l.024.157c.006.033.04.058.081.058.08 0 .182-.102.291-.289.042-.073.059-.13.059-.197a1.9 1.9 0 0 0-.026-.257l-.003-.049c0-.041.024-.067.063-.067.021 0 .037.007.04.016l.058.191c.019.063.063.107.108.107.127 0 .258-.259.272-.537.008-.174.03-.235.083-.235.021 0 .036.008.041.02l.033.107c.012.039.036.065.059.065s.063-.037.115-.099a.83.83 0 0 0 .074-.107c.016-.029.024-.059.024-.087 0-.042-.009-.053-.074-.103s-.091-.104-.091-.173c0-.12.051-.248.198-.504.135-.232.153-.259.186-.259s.055.031.055.07c0 .024-.006.049-.017.072-.022.048-.034.081-.034.099 0 .032.029.059.066.059a.34.34 0 0 0 .15-.05 1.66 1.66 0 0 1 .223-.091l.281-.099c.052-.016.111-.025.17-.025.048 0 .096.006.144.017.094.022.158.034.19.034a.13.13 0 0 0 .133-.131.35.35 0 0 0-.051-.143l-.074-.115c-.036-.038-.051-.059-.051-.074l.009-.033c.01-.02.028-.033.047-.033.065 0 .193.133.308.323.648 1.057.962 1.391 1.438 1.535.496.15.613.255.613.551 0 .157-.083.346-.258.588-.33.458-.447.726-.447 1.031l.001.068.091-.042c.068-.034.112-.054.133-.058l.206-.041c.065-.013.117-.09.117-.173l-.001-.016-.016-.182-.002-.029c0-.102.05-.187.112-.187.023 0 .049.021.063.05l.132.28c.053.115.116.17.288.248zM17.663 3.42l-.008.132-.002.081c0 .134.021.192.071.192s.141-.102.179-.215a.84.84 0 0 0 .042-.224.41.41 0 0 0-.042-.156c-.037-.09-.075-.132-.121-.132-.071 0-.104.091-.118.322z" data-v-7c4b1471></path><path d="M17.089 1.893l.116.115c.169.169.285.513.285.846l-.004.137-.025.388-.008.325c0 1.079.314 1.815.776 1.815.116 0 .258-.107.356-.265.021-.034.049-.06.066-.06.026 0 
.039.022.041.068l.017.182.016.156.025.174.025.162c0 .083-.017.092-.215.119a.55.55 0 0 0-.231.091c-.128.082-.203.139-.215.165l-.091.19c-.012.024-.032.041-.053.041-.048 0-.113-.073-.204-.231-.084-.146-.121-.23-.121-.269 0-.026.021-.037.072-.037h.017l.099.008.074-.008.067-.008c.028-.007.049-.031.049-.061 0-.02-.013-.038-.033-.046l-.372-.158c-.089-.038-.185-.134-.272-.272-.095-.152-.151-.285-.151-.36 0-.059.025-.097.065-.097.016 0 .037.006.061.019a.31.31 0 0 0 .097.033c.014 0 .03-.006.044-.017s.025-.027.025-.04c0-.024-.025-.055-.066-.085-.309-.219-.497-.485-.497-.704 0-.064.015-.094.047-.094l.061.006c.045.009.099.017.116.017.045 0 .076-.025.076-.064 0-.024-.03-.054-.076-.076-.206-.103-.257-.162-.388-.455-.101-.224-.153-.318-.24-.429.108-.052.166-.067.257-.067.113 0 .184.074.264.273.05.126.105.211.137.211.015 0 .021-.026.021-.095a.96.96 0 0 0-.034-.248c-.02-.075-.029-.137-.024-.182l-.017-.116-.091-.05c-.084-.045-.141-.154-.141-.271a.61.61 0 0 1 .017-.15l.05-.191a1.6 1.6 0 0 0 .059-.349v-.071l.074.082zm.992 5.953a1.22 1.22 0 0 1-.396-.123.75.75 0 0 0-.31-.084c-.131 0-.231.041-.351.141l.074.057.166.116.158.124c.04.032.089.05.139.05.162 0 .299-.074.522-.281zm3.182 1.502a.47.47 0 0 1 .165-.215c.231-.198.339-.345.339-.462v-.083-.099c0-.106.017-.141.073-.141.025 0 .045.01.051.025l.041.098c.01.023.036.037.07.037.059 0 .101-.042.203-.21.043-.071.061-.117.061-.164 0-.029-.006-.061-.02-.093l-.058-.131a.4.4 0 0 1-.034-.129c0-.054.027-.086.074-.086.025 0 .047.01.06.026l.065.082c.015.018.019.029.044.029s.085-.029.106-.07l.066-.132c.017-.033.025-.059.025-.075 0-.028-.026-.056-.067-.073l-.116-.05c-.135-.058-.248-.291-.248-.512 0-.025.009-.061.025-.107l.05-.141a.41.41 0 0 0 .025-.137c0-.081-.025-.159-.082-.251l-.099-.173c-.048-.123-.076-.164-.117-.164-.029 0-.057.011-.106.039l-.083.05-.116.133-.116.057c-.069.035-.135.184-.135.309 0 .178.039.275.309.757.114.204.168.355.168.47 0 .336-.343.613-.903.727-.669.137-.815.248-.892.678.116-.06.162-.068.366-.068l.121.002h.019c.111 0 
.125-.008.196-.107a.62.62 0 0 1 .273-.23l.132-.058.107-.058.033-.009c.03 0 .046.012.046.035 0 .028-.027.076-.079.139-.08.097-.126.196-.126.272 0 .11.018.166.085.265z" data-v-7c4b1471></path><path d="M20.223 8.069c.095.059.191.145.248.223.079.108.09.116.169.116a.52.52 0 0 0 .12-.017l.074-.016c.141-.027.183-.047.183-.085 0-.023-.013-.046-.035-.064l-.099-.082-.091-.083a.44.44 0 0 0-.261-.092c-.103 0-.186.027-.309.1zm2.675.446l.049-.009.083-.008.066.008.045.003c.084 0 .127-.009.162-.036.051-.039.104-.101.157-.19.021-.035.032-.068.032-.094 0-.056-.051-.088-.141-.088-.227 0-.366.126-.454.413z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M19.816 2.81c.009.077.049.14.09.14.035 0 .09-.118.176-.263.117-.199.214-.365.214-.471 0-.035-.018-.062-.041-.062l-.009.003-.091.084a.94.94 0 0 1-.273.148c-.106.038-.136.07-.136.14l.012.067.069.143-.011.072zm-.724-.611c-.029-.024-.07-.042-.092-.042-.035 0-.05.018-.05.06l.002.024c.006.045.018.092.032.14a.72.72 0 0 0 .109.231l.125.209a.43.43 0 0 0 .18.114l.006.001c.022 0 .041-.014.043-.034l.026-.165.041-.149.005-.035c0-.04-.023-.076-.062-.096l-.141-.075-.222-.181z" data-v-7c4b1471></path><path d="M19.637 3.248h-.044l-.096-.009c-.003-.001-.037-.021-.1-.059-.037-.022-.077-.033-.116-.033-.076 0-.121.047-.121.126 0 .062.037.182.105.344l.108.264c.091.231.185.33.31.33.107 0 .193-.088.26-.263l.091-.372c.044-.116.153-.164.153-.245 0-.095-.106-.186-.196-.186l-.048.009-.124.084-.182.008z" data-v-7c4b1471></path></g><path d="M19.627 3.477h-.099c-.015 0-.03.009-.041.025-.016.023-.025.038-.025.041l.041.198.033.173c.019.095.034.133.105.133.081 0 .15-.055.159-.157l.017-.182.016-.188c0-.018-.009-.036-.024-.051s-.027-.018-.043-.018l-.007.001-.132.025z" class="C" data-v-7c4b1471></path><path d="M8.2 5.467l.033.124.066.264c.104.472.192.595.421.595.131 0 .24-.073.273-.182l.05-.166c.011-.038.04-.06.079-.06.089 0 .131.093.135.3.002.059.021.194.059.404.022.124.032.245.032.36a.59.59 0 0 1-.049.259c.028.162.05.38.05.506 0 
.204-.038.394-.099.493a1.04 1.04 0 0 1 .042.311 1.55 1.55 0 0 1-.026.275l-.044.285a.35.35 0 0 0 .019.111c.016.047.025.081.025.099 0 .023-.011.041-.025.041l-.165-.066-.182-.099c-.059-.032-.099-.064-.19-.148-.161.144-.208.166-.628.281-.084.022-.146.048-.182.074-.052.037-.09.057-.11.057-.071 0-.105-.114-.105-.35 0-.175.018-.337.05-.46a.94.94 0 0 0-.295-.074 1.68 1.68 0 0 0-.126.016l-.031.003c-.024 0-.044-.036-.044-.079 0-.032.022-.112.051-.179l.082-.198c.03-.072.098-.154.198-.239a.53.53 0 0 1 .215-.125l.297-.057c.177-.034.282-.128.282-.251s-.08-.199-.257-.261l-.356-.124a1.91 1.91 0 0 1-.323-.157c-.197-.117-.23-.132-.277-.132-.071 0-.112.054-.112.147 0 .147.096.25.232.25.028 0 .056-.006.083-.017.04-.016.069-.025.085-.025.037 0 .058.022.058.063l-.002.036-.017.14c-.014.12-.092.265-.264.496l-.074.099a1.14 1.14 0 0 1-.446.652l-.091.066c-.007.008-.029.047-.066.116-.017.032-.072.098-.165.198-.07.076-.139.139-.207.19-.092.07-.148.121-.165.149l-.083.141c-.015.025-.04.041-.067.041-.073 0-.14-.076-.182-.206-.03-.097-.057-.125-.12-.125-.032 0-.081.009-.144.025l-.116.016h-.298c-.148 0-.358.039-.454.084-.038.018-.067.026-.085.026s-.033-.011-.055-.051c-.015-.028-.018-.041-.018-.066 0-.151.108-.435.249-.661.09-.143.119-.176.223-.264-.033-.14-.123-.249-.231-.28l-.141-.041c-.019-.006-.033-.022-.033-.04 0-.043.067-.102.191-.167.161-.085.324-.132.454-.132.137 0 .27.054.422.174.224.175.284.207.393.207.177 0 .345-.162.345-.331 0-.113-.06-.234-.218-.438-.23-.298-.299-.452-.299-.671 0-.136.042-.309.093-.386l.083-.123c.017-.028.028-.055.028-.08 0-.055-.064-.095-.149-.095-.13 0-.381.093-.581.215-.099.061-.166.132-.166.176 0 .026.013.05.034.064l.091.058c.011.007.018.02.018.034s-.014.038-.034.064l-.165.215a2.76 2.76 0 0 1-.347.363.57.57 0 0 0-.083.083c-.001.001-.023.042-.066.124a1.3 1.3 0 0 1-.364.371c-.061.042-.099.072-.115.091s-.034.053-.066.115c-.021.04-.057.098-.108.173-.081.12-.148.205-.198.256L3.4 8.67c-.071.071-.133.209-.231.511-.058.183-.111.29-.142.29-.014 
0-.037-.018-.065-.051-.164-.187-.305-.298-.379-.298-.017 0-.048.014-.092.042-.061.038-.136.067-.223.083l-.305.057c-.231.044-.322.094-.455.248-.032.036-.058.057-.072.057-.025 0-.044-.041-.044-.096l.008-.077.042-.223c.044-.239.098-.416.165-.537l.14-.256c.028-.05.042-.095.042-.134 0-.19-.197-.365-.437-.387-.061-.005-.084-.02-.084-.054 0-.042.067-.125.159-.193.165-.126.382-.215.52-.215.084 0 .213.035.371.099l.182.074a.49.49 0 0 0 .184.043c.192 0 .379-.175.609-.57.104-.179.149-.308.149-.424 0-.208-.099-.336-.262-.336a.53.53 0 0 0-.357.156c-.083.084-.127.172-.127.256l.003.092.001.02c0 .077-.026.12-.072.12-.078 0-.191-.111-.516-.504-.242-.293-.296-.418-.306-.711-.28-.357-.35-.508-.35-.762l.011-.17c-.194-.233-.249-.36-.249-.574 0-.078.013-.124.059-.218-.099-.145-.13-.21-.157-.322l-.041-.174c-.028-.118-.073-.22-.132-.305-.075-.107-.083-.121-.083-.158 0-.096.066-.134.356-.205-.012-.08-.017-.142-.017-.183 0-.048.006-.094.025-.197l.058-.306c.023-.123.035-.211.035-.264s-.009-.098-.06-.263a.9.9 0 0 0-.074-.19c-.027-.042-.05-.094-.05-.113 0-.029.02-.045.057-.045.082 0 .298.131.522.316.474.392.572.549.637 1.023l.025.166c.227.167.347.369.347.585a1.03 1.03 0 0 1-.165.529l-.166.256a.5.5 0 0 0-.074.273c0 .155.045.298.157.494.127.223.235.314.368.314.218 0 .409-.379.409-.81v-.058l-.001-.149c0-.084.005-.167.018-.272l.024-.24.009-.199.003-.13c0-.046-.024-.093-.086-.158l-.181-.198c-.263-.287-.407-.556-.407-.757a.42.42 0 0 1 .422-.439.6.6 0 0 1 .175.032v-.056c0-.364.075-.633.175-.633l.04.013.149.132.181.116.05.049c0-.102.005-.113.108-.232.025-.028.063-.081.115-.157l.082-.124.042-.074c.022-.042.044-.062.068-.062s.042.03.072.12c.019.056.032.071.174.198.049.044.083.081.098.108s.036.078.067.164l.025.067a.5.5 0 0 1 .165-.149l.124-.074c.046-.028.078-.043.092-.043s.026.02.041.05.035.07.066.124c.088.153.117.227.117.299l-.002.081V1.9c.271.085.373.225.373.517 0 .21-.11.451-.323.705-.142.169-.158.202-.158.32 0 .611.426.965 1.175.976l.553.008c.356.006.569.041.9.148.393.128.507.158.621.158.038 0 
.076-.012.107-.034s.049-.049.049-.073c0-.116-.203-.209-.867-.397-1.363-.386-1.941-.934-1.941-1.836 0-.697.58-1.237 1.33-1.237a2.89 2.89 0 0 1 .826.141l.338.124c.062.023.122.035.178.035.115 0 .174-.096.177-.291.003-.104.026-.14.094-.14.095 0 .242.119.377.306s.224.401.224.542c0 .188-.088.319-.29.432-.185.282-.414.412-.72.412-.239 0-.578-.135-1.139-.454-.196-.111-.295-.148-.398-.148-.146 0-.241.084-.241.214 0 .119.092.196.334.28.357.125.445.174.586.323.421.073.604.146.868.346.725.23 1.175.724 1.175 1.291 0 .551-.373.947-.892.947a1.6 1.6 0 0 1-.391-.05z" class="B" data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M9.021 9.083a.91.91 0 0 1-.03-.199l.006-.065.025-.132.009-.107c0-.115-.019-.196-.058-.248L8.872 8.2c-.032-.044-.061-.097-.082-.158-.011-.03-.017-.054-.017-.069 0-.037.023-.061.06-.061.022 0 .031.004.106.048.032.019.06.028.081.028.049 0 .076-.08.076-.22 0-.159-.015-.248-.05-.288l-.124-.14-.107-.091c-.026-.022-.044-.05-.044-.072 0-.035.027-.06.067-.06s.117.018.183.042l.038.008c.034 0 .047-.02.047-.068l-.002-.03-.025-.174-.049-.181c-.016-.059-.025-.068-.063-.068l-.021.003-.206.041-.033.003c-.271 0-.557-.228-.644-.515l-.165-.537c-.051-.166-.175-.323-.397-.504-.067-.055-.124-.086-.165-.09l-.149-.017c-.027-.003-.045-.019-.045-.041 0-.034.032-.049.103-.049l.272.024a2.14 2.14 0 0 1 .446.116c.247.085.414.121.554.121.413 0 .661-.223.661-.597 0-.256-.134-.545-.338-.731-.117-.104-.233-.182-.273-.182-.023 0-.042.019-.042.044l.008.105v.01c0 .041-.024.073-.057.073-.042 0-.089-.054-.143-.165-.156-.325-.404-.518-.661-.518-.043 0-.074.018-.074.044 0 .017.013.057.041.119l.066.148a.28.28 0 0 1 .027.076c0 .009-.016.018-.035.018-.038 0-.098-.034-.14-.078l-.123-.132-.108-.166c-.086-.132-.155-.192-.339-.289l-.198-.107c-.038-.022-.073-.033-.103-.033s-.044.014-.044.043.011.058.032.09c.061.094.085.141.085.166s-.019.049-.043.049c-.064 0-.197-.13-.315-.305-.097-.145-.149-.307-.149-.464 0-.355.259-.619.61-.619.181 0 .41.077.621.207l.388.24a1.02 1.02 0 0 0 .452.149 1.11 1.11 0 0 
0 .242-.042c.036-.01.07-.023.099-.041.023-.014.039-.032.039-.046s-.009-.032-.023-.036l-.049-.016-.182-.034c-.047-.008-.122-.036-.223-.083l-.414-.206a1.61 1.61 0 0 0-.272-.156c-.067-.023-.109-.053-.109-.079s.013-.036.037-.036.054.011.088.032c.122.076.228.116.306.116l.355.124c.176.062.433.116.549.116.15 0 .271-.122.271-.273a.72.72 0 0 0-.143-.362c-.019-.027-.039-.042-.056-.042-.033 0-.051.024-.051.066l.008.067.013.098c0 .107-.072.191-.164.191a.44.44 0 0 1-.13-.026l-.537-.173c-.45-.145-.658-.19-.884-.19-.642.001-1.132.442-1.132 1.022 0 .657.462 1.081 1.603 1.472 1.123.385 1.281.479 1.281.768 0 .166-.168.314-.356.314a1.03 1.03 0 0 1-.198-.017l-.562-.132a4.18 4.18 0 0 0-1.028-.116c-.81 0-1.26.142-1.649.521-.097.095-.129.109-.241.109l-.056-.002-.116-.008h-.015c-.097 0-.169.045-.233.149l-.141.223c-.023.031-.058.049-.094.049-.074 0-.119-.056-.203-.256a.7.7 0 0 0-.173-.249l-.165-.149c-.027-.024-.042-.046-.042-.063s.012-.045.033-.077c.079-.115.107-.166.116-.215.022-.11.025-.115.071-.115s.066.051.07.24a.6.6 0 0 0 .067.223l.091.198c.011.026.041.042.074.042.085 0 .194-.201.223-.413.024-.177.042-.232.079-.232.015 0 .029.006.036.017s.034.061.074.148a.33.33 0 0 0 .108.124.34.34 0 0 0 .099.058l.01.001c.067 0 .09-.044.106-.2l.008-.082-.017-.116-.012-.08c0-.053.019-.085.049-.085.018 0 .04.012.062.033l.083.083c.055.054.11.082.163.082.069 0 .111-.028.111-.073 0-.04-.015-.079-.076-.191l-.058-.107c-.066-.125-.119-.174-.181-.174l-.083.008-.099.041c-.03.012-.052.019-.065.019s-.026-.014-.026-.033c0-.074.048-.191.14-.336.065-.105.11-.328.132-.649.028-.431.086-.711.146-.711.041 0 .057.035.057.127l-.004.113-.003.079c0 .077.017.139.037.139.071 0 .155-.105.222-.276.028-.072.043-.128.043-.168l-.009-.038-.165-.306-.165-.049c-.031-.009-.051-.023-.058-.037V1.9v-.09l-.016-.1-.033-.083-.041-.091c-.181.098-.267.163-.331.247s-.101.12-.133.12c-.021 0-.04-.02-.04-.044s.008-.065.025-.134l.005-.046c0-.178-.108-.352-.286-.465l-.05.066-.058.083-.099.116a.48.48 0 0 0-.083.165.32.32 0 0 
0-.017.094v.013l.001.03c0 .069-.007.086-.038.086l-.02-.009-.058-.082-.09-.099-.14-.091-.074-.058-.033.074c-.026.058-.04.102-.041.133l-.009.132-.008.14.001.024c0 .072-.017.1-.061.1-.015 0-.057-.018-.097-.041a.24.24 0 0 0-.12-.034c-.129 0-.211.089-.211.228 0 .165.049.323.133.433l.107.14c.03.037.084.07.087.049l.02-.173-.025-.14-.005-.063c0-.053.018-.086.049-.086.047 0 .075.051.105.19l.066.306.067.388a1.54 1.54 0 0 0 .099.33 1.71 1.71 0 0 0 .141.289c.092.118.102.134.102.16 0 .02-.017.038-.035.038s-.043-.009-.074-.025a.34.34 0 0 0-.099-.033l-.065-.009c-.133 0-.16.114-.16.665a3.35 3.35 0 0 0 .159 1.012l.173.438a.54.54 0 0 1 .041.205c0 .602-.799 1.547-1.677 1.983-.327.161-.431.317-.454.676l-.008.108a1.15 1.15 0 0 1 .28-.116c.246-.064.294-.095.331-.215l.032-.107c.013-.04.033-.055.223-.157.065-.034.111-.072.133-.107.041-.07.076-.107.098-.107s.032.01.032.027l-.007.029-.082.236-.01.058c0 .175.096.317.274.408.029-.191.049-.256.108-.346l.148-.231c.055-.086.078-.19.078-.353a1.53 1.53 0 0 0-.02-.258l-.006-.065c0-.042.022-.066.058-.066.029 0 .052.01.055.024l.033.157c.006.034.041.058.08.058.08 0 .183-.103.291-.289a.44.44 0 0 0 .061-.219l-.012-.112-.025-.123-.002-.024c0-.054.03-.092.073-.092.019 0 .034.007.037.016l.049.191c.017.063.061.107.107.107.056 0 .151-.096.207-.207a.73.73 0 0 0 .075-.338c0-.176.018-.227.083-.227.021 0 .037.007.041.02l.033.107c.013.039.036.065.059.065.034 0 .127-.1.189-.206.017-.027.025-.055.025-.077 0-.035-.024-.071-.075-.113-.086-.071-.091-.082-.091-.19s.017-.158.091-.289l.107-.198c.104-.204.142-.257.18-.257.032 0 .06.034.06.073 0 .022-.006.045-.017.068a.28.28 0 0 0-.033.098c0 .032.027.059.061.059s.088-.017.146-.05c.022-.013.099-.042.231-.091l.273-.099a.49.49 0 0 1 .165-.025.66.66 0 0 1 .148.017l.191.034c.075 0 .141-.064.141-.138 0-.055-.057-.163-.133-.251-.028-.033-.042-.058-.042-.075l.01-.033c.011-.02.028-.033.044-.033.056 0 .186.139.303.323.711 1.119.966 1.39 1.446 1.535.486.148.613.261.613.55 0 .158-.081.341-.258.589-.36.503-.447.706-.447 
1.038v.061l.091-.042c.067-.034.112-.054.132-.058l.206-.041c.062-.013.117-.092.117-.169l-.001-.021-.025-.182-.002-.024c0-.064.032-.138.076-.174.014-.01.028-.018.043-.018.023 0 .051.022.065.051l.132.28c.054.115.117.169.288.248zm-6.66-5.985c-.06 0-.091.052-.097.165l-.016.156-.02.2c0 .118.035.205.083.205s.135-.101.177-.216c.025-.07.037-.13.037-.193 0-.055-.013-.121-.037-.187-.03-.084-.076-.132-.126-.132zm-.685-1.205l.116.115c.189.188.282.501.282.945 0 .098-.005.163-.026.425l-.017.34c0 1.024.333 1.798.774 1.798.139 0 .229-.066.359-.264.021-.032.054-.06.075-.06s.039.021.041.068l.017.182.017.156.016.174.025.157.002.02c0 .046-.02.072-.059.079l-.149.025c-.074.012-.152.043-.231.091-.125.075-.199.132-.215.165l-.091.19c-.013.024-.032.041-.052.041-.048 0-.125-.084-.213-.231-.075-.126-.121-.228-.121-.27 0-.026.02-.036.073-.036h.016l.107.008.066-.008.075-.008c.028-.004.05-.028.05-.058 0-.022-.014-.041-.033-.049l-.371-.158a.58.58 0 0 1-.281-.272c-.102-.196-.15-.313-.15-.371 0-.051.027-.085.067-.085.014 0 .034.006.058.018a.33.33 0 0 0 .105.033c.032 0 .061-.025.061-.052s-.03-.062-.066-.089c-.31-.225-.488-.484-.488-.707 0-.062.015-.092.048-.092l.06.007.107.016h.005c.04 0 .071-.026.071-.064 0-.023-.028-.054-.068-.076l-.132-.074c-.046-.029-.085-.068-.115-.116a7.22 7.22 0 0 1-.14-.264l-.116-.223-.075-.14a.75.75 0 0 0-.05-.066.52.52 0 0 1 .257-.067c.113 0 .184.074.264.273.05.126.105.211.136.211.016 0 .021-.027.021-.097a.94.94 0 0 0-.034-.246c-.02-.075-.029-.137-.025-.182l-.016-.116-.099-.05c-.022-.01-.048-.04-.075-.082-.046-.072-.066-.133-.066-.188 0-.035.009-.092.025-.15.084-.315.108-.438.108-.54l-.001-.071.075.082z" data-v-7c4b1471></path><path d="M2.666 7.846c-.115-.017-.169-.03-.24-.058l-.157-.065c-.133-.057-.233-.084-.316-.084-.125 0-.227.042-.344.141l.074.057.165.116.157.124a.22.22 0 0 0 .136.05c.162 0 .303-.076.525-.281zm3.179 1.503c.045-.103.067-.131.165-.215.231-.198.338-.345.338-.462v-.083-.099c0-.106.018-.141.073-.141.025 0 
.044.01.051.025l.041.098c.01.023.037.038.07.038s.079-.038.121-.096l.083-.115c.041-.058.06-.107.06-.161a.27.27 0 0 0-.018-.095l-.058-.132a.39.39 0 0 1-.034-.129c0-.054.027-.086.074-.086.024 0 .046.01.059.026l.066.082c.014.018.038.029.063.029.03 0 .065-.029.086-.07l.066-.132c.017-.033.026-.059.026-.074 0-.026-.027-.052-.075-.075l-.107-.05c-.142-.065-.249-.292-.249-.529a.24.24 0 0 1 .017-.09l.058-.141a.33.33 0 0 0 .025-.132c0-.086-.028-.172-.083-.256l-.108-.173c-.049-.13-.074-.167-.114-.167l-.042.01-.058.033-.091.05-.107.133-.115.057c-.069.035-.135.182-.135.303 0 .215.024.274.307.762.11.188.168.352.168.472 0 .338-.303.579-.911.725l-.38.091c-.28.067-.445.259-.504.587.116-.06.161-.068.366-.068l.122.002h.019c.11 0 .125-.008.195-.107.082-.114.162-.182.273-.23l.132-.058.107-.058.033-.009c.03 0 .046.012.046.034 0 .032-.03.086-.078.14-.095.106-.132.194-.132.32 0 .08.019.126.091.217z" data-v-7c4b1471></path><path d="M4.805 8.068a.95.95 0 0 1 .248.223c.079.108.09.116.168.116.033 0 .074-.006.121-.017l.074-.016c.136-.026.183-.046.183-.08 0-.02-.015-.045-.042-.069l-.091-.082-.091-.083a.45.45 0 0 0-.273-.093c-.099 0-.157.02-.298.101zm2.679.447l.049-.009.075-.008.074.008.036.003c.183 0 .36-.155.36-.319 0-.057-.052-.088-.146-.088-.223 0-.362.128-.449.414z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M4.402 2.81c.009.076.049.14.089.14.035 0 .075-.091.161-.237.117-.198.229-.392.229-.497 0-.035-.017-.062-.041-.062l-.008.003-.091.083c-.066.06-.118.088-.281.148-.093.036-.127.073-.127.141l.012.066.041.116.017.099z" data-v-7c4b1471></path><path d="M3.896 2.379l.14.075c.039.02.06.047.06.08l-.01.052c-.02.07-.004.147-.005.176-.007.148-.119.146-.165.146-.027 0-.013-.017-.078-.091l-.182-.206c-.056-.063-.105-.208-.124-.371l-.001-.021c0-.045.014-.062.05-.062.022 0 .063.018.091.042l.223.181zm.323.867h-.045l-.095-.009a3.81 3.81 0 0 1-.099-.059c-.037-.022-.078-.033-.118-.033-.075 0-.123.041-.123.104s.033.181.109.367l.108.264c.094.232.186.331.307.331.108 0 
.123-.106.196-.289.165-.415.257-.476.257-.623 0-.1-.022-.127-.113-.127l-.077-.018-.132.084-.174.008z" data-v-7c4b1471></path></g><path d="M4.212 3.477h-.107c-.016 0-.032.009-.041.025l-.016.041.041.198.033.173c.018.095.053.133.124.133.081 0 .131-.055.14-.157l.016-.182.016-.188c0-.018-.009-.036-.025-.051s-.027-.018-.041-.018l-.008.001-.132.025z" class="C" data-v-7c4b1471></path><path d="M14.987 10.884c-.072-.176-.245-.3-.447-.3s-.375.124-.447.3h-.705c-.072-.176-.245-.3-.447-.3s-.375.124-.447.3h-.705c-.072-.176-.245-.3-.447-.3s-.375.124-.447.3H9.98v4.28h.81v1.641h.95v-1.64h2.399v1.64h.949v-1.64h.809v-4.28h-.911z" class="B" data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M10.965 15.539h.492v.632h-.492v-.632zm0 .774h.492v.2h-.492v-.2zm3.351-.774h.492v.632h-.492v-.632zm0 .774h.492v.2h-.492v-.2z" data-v-7c4b1471></path></g><g fill="#e73337" data-v-7c4b1471><path d="M10.309 11.234h5.256v.195h-5.256v-.195zm.002 3.599h5.254v-3.22h-5.256l.002 3.22z" data-v-7c4b1471></path></g><g class="C" data-v-7c4b1471><use xlink:href="#B" data-v-7c4b1471></use><path d="M12.715 11.1c0-.124.101-.225.225-.225s.225.101.225.225v.591c0 .124-.101.225-.225.225s-.225-.101-.225-.225V11.1zm1.598 0c0-.124.101-.225.225-.225s.225.101.225.225v.591c0 .124-.101.225-.225.225s-.225-.101-.225-.225V11.1zm-2.633 2.158c.339.424.766.633 1.294.633.489 0 .826-.175 1.222-.633-.395-.405-.763-.575-1.246-.575-.508 0-.966.207-1.27.575z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M12.938 12.873c-.364 0-.659.113-1.008.383.289.286.629.433 1.002.433.42 0 .694-.116 1.006-.425-.358-.284-.633-.391-1-.391zm-.014.483c-.195 0-.36-.052-.569-.183.174-.094.37-.142.578-.142a1.38 1.38 0 0 1 .597.142 1.03 1.03 0 0 1-.605.183zm2.079 1.191c-.157 0-.284-.127-.284-.283s.127-.284.284-.284.284.127.284.284-.127.283-.284.283z" data-v-7c4b1471></path></g><path d="M15.002 14.049c-.119 0-.216.097-.216.216s.097.215.216.215.216-.097.216-.215-.097-.216-.216-.216z" class="C" data-v-7c4b1471></path><path d="M10.887 
14.547c-.161 0-.293-.127-.293-.283s.127-.284.284-.284.284.127.284.284a.28.28 0 0 1-.275.283z" class="B" data-v-7c4b1471></path><path d="M10.876 14.049c-.119 0-.216.097-.216.216s.101.215.225.215c.114 0 .208-.097.208-.215s-.097-.216-.216-.216z" class="C" data-v-7c4b1471></path><path d="M10.877 12.593c-.156 0-.284-.127-.284-.284s.127-.283.284-.283.284.127.284.283-.128.284-.284.284z" class="B" data-v-7c4b1471></path><use xlink:href="#C" class="C" data-v-7c4b1471></use><path d="M15.003 12.593c-.157 0-.284-.127-.284-.284s.127-.283.284-.283.284.127.284.283-.127.284-.284.284z" class="B" data-v-7c4b1471></path><path d="M15.002 12.092c-.119 0-.216.097-.216.216s.097.216.216.216.216-.097.216-.216-.097-.216-.216-.216z" class="C" data-v-7c4b1471></path></g><defs data-v-7c4b1471><clipPath id="A" data-v-7c4b1471><path fill="#fff" d="M0 0h148.235v30H0z" data-v-7c4b1471></path></clipPath><path id="B" d="M11.113 11.1c0-.124.101-.225.225-.225s.225.101.225.225v.591c0 .124-.101.225-.225.225s-.225-.101-.225-.225V11.1z" data-v-7c4b1471></path><path id="C" d="M10.876 12.092c-.119 0-.216.097-.216.216s.097.216.216.216.216-.097.216-.216-.097-.216-.216-.216z" data-v-7c4b1471></path></defs></svg></a></div><div class="gh-navAndProfile"><nav class="gh-nav"><div class="gh-accordion gh-mobile-nav"><div class="gh-menu-layer"><div class="gh-logo-mobile-container"><a href="https://www.cambridge.org/academic"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="149" height="30" viewbox="0 0 149 30" fill="none" aria-label="Homepage Cambridge University Press" class="gh-logo" data-v-7c4b1471><g clip-path="url(#A)" data-v-7c4b1471><g class="B" data-v-7c4b1471><path d="M147.065 1.786l.036 2.785-.445.114c-.1-1.217-.376-2.211-2.324-2.211h-2.618v5.147h2.247c1.103 0 1.402-.616 1.527-1.679h.447v4.304l-.446.007c-.069-1.029-.311-1.829-1.518-1.829h-2.257v4.52c0 1.314 1.133 1.314 2.332 1.314 2.151 0 3.149-.135 3.75-2.31l.441.098-.602 2.901h-9.774v-.482h.186c.75 0 1.443-.14 
1.443-1.162V3.43c0-1.022-.693-1.163-1.443-1.163h-.186v-.481h9.204zm-53.267 0h-5.895v.481h.26c.486 0 1.388 0 1.388 1.525v9.569c0 .815-.748 1.104-1.388 1.104h-.26v.482h5.724v-.482h-.261c-.932 0-1.591-.115-1.591-1.6V8.766h1.454l3.918 6.18h3.371v-.492c-.889-.048-1.311-.356-1.828-1.153l-3.23-4.915.085-.035c1.152-.466 2.615-1.414 2.615-3.286 0-2.207-1.447-3.28-4.363-3.28zm-.749.536c1.685 0 2.804 1.123 2.804 2.858 0 2.138-.903 3.051-2.953 3.051h-1.125l.001-5.826a8.87 8.87 0 0 1 1.273-.083zm13.772-.539h-5.501v.481h.186c.751 0 1.443.142 1.443 1.163V13.3c0 1.021-.692 1.162-1.443 1.162h-.186v.481h5.501v-.481h-.186c-.75 0-1.444-.14-1.444-1.162V3.427c0-1.021.694-1.163 1.444-1.163h.186v-.481zM31.488 8.355c0 4.435 2.709 6.939 6.956 6.939 2.089 0 3.924-.818 5.112-2.354l-.345-.644c-1.055 1.355-2.452 2.089-4.412 2.089-3.487 0-4.927-3.074-4.927-6.324 0-3.652 1.781-5.919 4.647-5.919 2.79 0 3.616 1.617 4.112 3.383l.433-.056-.139-3.046c-1.373-.641-2.776-.945-4.46-.945-1.88 0-3.545.638-4.814 1.775-1.367 1.224-2.162 3.146-2.162 5.102zM59.883 4.33l4.761 10.706h.397l4.553-10.921.401 8.416.019.944c0 .893-.643.931-1.533.984l-.036.002v.475h5.464v-.475c-1.008-.038-1.544-.13-1.622-1.151l-.509-8.729c-.112-1.86.151-2.32 1.632-2.32h.07l.001-.485h-3.654l-4.231 9.975-4.472-9.974h-3.727v.479h.152c1.046 0 1.669.644 1.602 1.779l-.516 8.32c-.069 1.215-.161 2.046-1.657 2.106h-.169v.478h4.439v-.478h-.113c-1.21-.045-1.768-.203-1.689-2.003l.437-8.128zm55.196 10.61c1.826 0 3.63-.436 4.919-1.712 1.28-1.27 2.015-3.056 2.015-4.9 0-1.974-.668-3.636-1.93-4.81-1.28-1.191-3.113-1.741-5.301-1.741l-5.887.005v.482h.187c.749 0 1.443.141 1.443 1.162v9.871c0 1.023-.694 1.163-1.443 1.163h-.187v.483l6.184-.004zM114.1 2.349c3.704 0 5.505 1.986 5.505 6.347 0 2.806-1.514 5.669-4.841 5.669-1.413 0-2.003-.225-2.003-1.621l.001-10.334a17.99 17.99 0 0 1 1.338-.062zm19.119 11.918c-.493.224-1.326.347-2.132.33-3.496 0-5.062-3.303-5.062-6.636 0-3.628 1.855-5.882 4.914-5.882 2.267 0 3.277.887 3.823 
3.15l.441-.028-.209-3.141c-1.018-.34-2.623-.651-4.184-.651-3.487 0-7.192 2.112-7.192 6.746 0 4.357 2.81 7.173 7.156 7.173 1.801 0 3.422-.307 4.63-.695l.002-3.592c0-.706.374-1.079 1.082-1.079h.153v-.47h-5.062v.47h.154c.973 0 1.487.374 1.487 1.079l-.001 3.226zM80.916 8.129c2.063 0 3.294 1.147 3.294 3.168 0 1.797-1.21 3.117-2.814 3.117-1.336 0-1.746-.368-1.746-1.654V8.129h1.265zm-.073-5.86c1.733 0 2.612.865 2.612 2.723 0 1.682-.999 2.562-2.813 2.562h-.991V2.269h1.192zm1.507 5.51c1.616-.261 3.451-1.032 3.451-3.117 0-1.883-1.511-2.878-4.256-2.878l-5.846.003v.481h.261c.484 0 1.387 0 1.387 1.524v9.548c0 .817-.748 1.121-1.387 1.121h-.261v.477l6.451.001c2.463 0 4.358-1.655 4.358-3.72 0-.866-.303-1.609-.877-2.195-.742-.759-1.624-1.124-3.281-1.245zM50.387 1.784l4.289 11.418c.318.903.783 1.258 1.463 1.258h.04l-.001.486H51.08v-.486h.116c.991 0 1.38-.415 1.082-1.327L51.24 10.24h-4.149l-.956 2.654c-.42 1.245.237 1.553 1.201 1.553h.174l.001.494h-4.008l.002-.494h.045c.811 0 1.358-.498 1.668-1.305 0-.001 4.18-11.36 4.18-11.36l.992.002zm-3.065 7.81h3.684l-1.84-5.127-1.845 5.127zM32.257 20.335c.66 0 .829.18.829.672v4.573c0 1.56 1.153 2.449 3.111 2.449 1.982 0 3.063-1.02 3.063-2.677v-4.141c0-.744.18-.876.793-.876v-.348l-1.093.024a30.18 30.18 0 0 1-1.237-.024v.348c.625.012.925.012.925 1.116v3.997c0 1.272-.757 2.077-2.102 2.077-1.333 0-2.054-.84-2.054-2.089v-4.43c0-.612.264-.672.781-.672h.084v-.348l-1.573.024c-.409 0-.841-.012-1.526-.024v.348zm9.028 0c.829.024 1.237.312 1.237.732v5.33c0 .78-.252 1.056-1.177 1.068v.348l1.429-.024a43.1 43.1 0 0 1 1.453.024v-.348h-.048c-.877 0-1.165-.336-1.165-1.128v-4.982l5.909 6.686h.312v-6.578c0-.6.216-1.128.853-1.128h.264v-.348l-1.429.024c-.288 0-.757-.012-1.393-.024v.348c.853 0 1.213.192 1.213 1.14v4.189l-5.045-5.678-1.081.024c-.432 0-.877-.012-1.333-.024v.348h0zm13.441 7.13c-.685-.012-.853-.12-.853-.66v-5.678c0-.672.168-.792.877-.792v-.348l-1.501.024c-.564 0-1.177-.012-1.633-.024v.348c.673 0 .865.18.865.696v5.462c0 
.78-.168.972-.901.972v.348c.36-.012.709-.024 1.622-.024a56.45 56.45 0 0 1 1.525.024v-.348h0zm1.293-7.13c.601.012.829.072.973.432l2.931 7.238h.372l2.739-6.878c.132-.336.216-.492.288-.588.096-.12.18-.204.444-.204h.072v-.348l-1.057.024c-.325 0-.565-.012-1.057-.024v.348c.517.012.853.012.853.3 0 .144-.06.336-.156.6L60.631 26l-1.886-4.681c-.144-.348-.18-.528-.18-.66 0-.312.324-.312.829-.324v-.348l-1.742.024c-.541 0-.973-.012-1.633-.024v.348h0zm8.959 0c.877 0 1.009.084 1.009.744v5.642c0 .54-.132.732-1.009.744v.348l2.774-.024 3.375.012.42-1.789h-.312c-.204.552-.336.78-.468.924a1.14 1.14 0 0 1-.589.312c-.252.06-.709.096-1.381.096-.541 0-.865-.048-1.069-.156-.216-.108-.325-.312-.325-.732V23.96h1.55c.492 0 .829.156.829.816v.192l.3-.06c-.012-.324-.036-.648-.036-1.428l.012-.996h-.3c0 .708-.192.948-.877.948h-1.478v-2.977l1.502-.036c.829 0 1.021.084 1.201.24.168.156.288.348.372 1.02l.312-.12c-.036-.456-.048-.9-.048-1.116 0-.156.012-.348.012-.456l-3.231.024c-.853 0-1.694-.012-2.546-.024v.348h0zm7.686 0h.06c.745 0 .937.108.937.792v5.522c0 .612-.144.816-.949.816h-.06v.348a76.17 76.17 0 0 1 1.778-.024l1.682.024v-.348h-.084c-.757 0-.961-.192-.961-.624V24.14h.757l.588 1.068a22.31 22.31 0 0 0 .697 1.092l.672 1.032c.264.408.409.48.613.48.252 0 .793-.024 1.189-.024.276 0 .516.012.817.024v-.348a1.26 1.26 0 0 1-.913-.336c-.228-.204-.516-.552-.841-1.044L77.18 23.9c1.057-.396 1.621-1.044 1.621-1.957 0-1.26-.961-1.957-2.895-1.957l-1.982.024c-.288 0-.757-.012-1.261-.024v.348h0zm2.402.084c.192-.06.421-.084.613-.084 1.057 0 1.681.6 1.681 1.753 0 .828-.36 1.224-.673 1.416-.24.144-.516.228-.961.228h-.661v-3.313zm9.104-.624c-1.501 0-2.534.888-2.534 2.317 0 1.188.757 1.813 1.814 2.341.865.432 1.609.804 1.609 1.693 0 .66-.492 1.404-1.526 1.404-1.057 0-1.501-.684-1.826-1.765l-.3.048c.036.156.096.648.168 1.62.697.324 1.297.504 1.994.504 1.417 0 2.654-.876 2.654-2.317 0-.936-.48-1.704-1.802-2.389-1.153-.6-1.694-.936-1.694-1.68 0-.6.444-1.369 1.465-1.369.757 0 1.177.42 1.369 
1.332l.312-.036c-.06-.504-.12-1.092-.12-1.549l-.757-.084c-.432-.048-.697-.072-.829-.072zm6.391 7.671c-.685-.012-.853-.12-.853-.66v-5.678c0-.672.168-.792.877-.792v-.348l-1.502.024c-.564 0-1.177-.012-1.633-.024v.348c.673 0 .865.18.865.696v5.462c0 .78-.168.972-.901.972v.348c.36-.012.709-.024 1.621-.024a56.9 56.9 0 0 1 1.525.024v-.348zm1.517-5.966c.144-.792.409-1.044 1.333-1.044h1.633v6.002c0 .936-.192 1.008-.901 1.008h-.156v.348l1.79-.024 1.766.024v-.348c-.865-.012-1.105-.156-1.105-.864v-6.146h1.67c.432 0 .721.072.865.216.132.132.204.264.24.9l.276-.108c0-.408 0-.852.144-1.789l-.204-.096c-.18.3-.288.396-.673.396h-5.933c-.348 0-.529-.132-.613-.396h-.252a10.45 10.45 0 0 1-.216 1.921h.336zm8.746-1.164c.433 0 .745.048.913.276l2.558 3.469v2.653c0 .672-.216.72-1.045.732v.348l1.826-.024 1.694.024v-.348c-.853 0-1.057-.144-1.057-.624v-3.085l2.126-2.977c.252-.348.504-.444.828-.444v-.348l-1.045.024c-.252 0-.672-.012-1.213-.024v.348c.493.012.793.024.793.24 0 .12-.144.348-.24.492l-1.477 2.221-1.73-2.329c-.156-.216-.18-.324-.18-.384 0-.204.288-.228.624-.24v-.348l-1.693.024c-.565 0-.889-.012-1.682-.024v.348zm14.689.036c.12-.012.36-.036.636-.036 1.405 0 1.453 1.729 1.453 1.981 0 1.116-.492 1.717-1.465 1.717h-.06v.312h.18c1.393 0 1.874-.408 2.198-.744.264-.276.565-.768.565-1.488 0-1.236-.805-2.125-2.607-2.125l-1.717.024c-.385 0-.985-.012-1.442-.024v.348c.565 0 .853.036.853.708v5.63c0 .672-.24.792-.865.792v.348l1.586-.024 1.838.024v-.348c-.649 0-1.154 0-1.154-.756l.001-6.338zm4.466-.036h.06c.744 0 .937.108.937.792v5.522c0 .612-.145.816-.949.816h-.06v.348c.42-.012 1.225-.024 1.777-.024l1.682.024v-.348h-.084c-.757 0-.961-.192-.961-.624V24.14h.757l.588 1.068c.216.372.529.84.697 1.092l.672 1.032c.265.408.409.48.613.48.252 0 .793-.024 1.189-.024.276 0 .517.012.817.024v-.348a1.26 1.26 0 0 1-.913-.336c-.228-.204-.516-.552-.841-1.044l-1.465-2.185c1.057-.396 1.621-1.044 1.621-1.957 0-1.26-.96-1.957-2.894-1.957l-1.982.024c-.288 0-.757-.012-1.261-.024v.348h0zm2.402.084a2.15 2.15 0 0 1 
.613-.084c1.057 0 1.681.6 1.681 1.753 0 .828-.36 1.224-.673 1.416-.24.144-.516.228-.96.228h-.661V20.42zm6.516-.084c.877 0 1.009.084 1.009.744v5.642c0 .54-.132.732-1.009.744v.348l2.775-.024 3.375.012.42-1.789h-.312c-.204.552-.336.78-.469.924-.144.144-.3.24-.588.312-.252.06-.709.096-1.381.096-.541 0-.865-.048-1.069-.156-.217-.108-.325-.312-.325-.732V23.96h1.55c.492 0 .828.156.828.816v.192l.301-.06c-.012-.324-.036-.648-.036-1.428l.012-.996h-.301c0 .708-.192.948-.876.948h-1.478v-2.977l1.502-.036c.828 0 1.021.084 1.201.24.168.156.288.348.372 1.02l.312-.12c-.036-.456-.048-.9-.048-1.116 0-.156.012-.348.012-.456l-3.231.024c-.852 0-1.693-.012-2.546-.024v.348zm10.51-.54c-1.501 0-2.534.888-2.534 2.317 0 1.188.757 1.813 1.813 2.341.865.432 1.61.804 1.61 1.693 0 .66-.492 1.404-1.526 1.404-1.056 0-1.501-.684-1.825-1.765l-.3.048c.036.156.096.648.168 1.62.696.324 1.297.504 1.994.504 1.417 0 2.654-.876 2.654-2.317 0-.936-.481-1.704-1.802-2.389-1.153-.6-1.693-.936-1.693-1.68 0-.6.444-1.369 1.465-1.369.757 0 1.177.42 1.369 1.332l.313-.036c-.06-.504-.12-1.092-.12-1.549l-.757-.084c-.432-.048-.697-.072-.829-.072zm6.233 0c-1.501 0-2.534.888-2.534 2.317 0 1.188.757 1.813 1.813 2.341.865.432 1.61.804 1.61 1.693 0 .66-.493 1.404-1.526 1.404-1.056 0-1.501-.684-1.825-1.765l-.3.048c.036.156.096.648.168 1.62.696.324 1.297.504 1.994.504 1.417 0 2.654-.876 2.654-2.317 0-.936-.48-1.704-1.802-2.389-1.153-.6-1.693-.936-1.693-1.68 0-.6.444-1.369 1.465-1.369.757 0 1.177.42 1.369 1.332l.313-.036c-.06-.504-.12-1.092-.12-1.549l-.757-.084c-.432-.048-.697-.072-.829-.072zM0 0v15.599c0 7.67 4.03 9.859 5.967 10.911.356.193 6.333 3.162 6.715 3.352l.258.138.257-.137 6.716-3.352C21.85 25.459 25.88 23.27 25.88 15.6V0H0z" data-v-7c4b1471></path></g><path d="M12.939 29.365l2.633-1.309V15.762h9.746l.001-.163V10.5h-9.747V.559h-5.266V10.5H.559v5.099l.001.163h9.746v12.294l2.633 1.309z" fill="#fff" data-v-7c4b1471></path><g fill="#e73337" data-v-7c4b1471><path d="M.559.559h9.747V10.5H.559V.559zm15.015 
0h9.747V10.5h-9.747V.559zm-5.269 15.205H.559c.056 7.126 3.712 9.191 5.674 10.256.199.108 2.281 1.146 4.073 2.038V15.764zm5.269 0v12.294l4.073-2.038c1.961-1.065 5.618-3.13 5.673-10.256h-9.746z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M19.073 13.234l.201-.239-.254-.302-.255.302.202.239c.003.12.01 1.175-.611 1.552 0 0 .207.04.486-.014a3.1 3.1 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.59-1.791l.201-.239-.254-.302-.255.302.202.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.486-.014a3.09 3.09 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.053.486.014.486.014-.622-.377-.614-1.431-.611-1.551zm1.586 1.791l.201-.239-.254-.302-.254.302.201.239c.003.12.01 1.175-.612 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.586-1.791l.201-.239-.254-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.485-.014a3.11 3.11 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.053.485.014.485.014-.622-.377-.614-1.431-.611-1.551zm-6.348 0l.201-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.486-.014a3.11 3.11 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.551zM3.733 13.234l.201-.239-.254-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.552zm1.59-1.791l.201-.239-.254-.302-.254.302.201.239c.003.121.011 1.175-.612 1.551 0 0 .207.04.486-.014 0 0 .048.275.178.588.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.586 1.791l.201-.239-.254-.302-.255.302.202.239c.003.12.01 1.175-.611 1.552 0 0 .207.04.485-.014a3.07 3.07 0 0 0 .179.588c.13-.313.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.586-1.791l.201-.239-.254-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 
.207.04.485-.014 0 0 .049.274.179.588.13-.314.179-.588.179-.588.278.053.486.014.486.014-.622-.377-.614-1.431-.611-1.551zm-6.348 0l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.551 0 0 .207.04.486-.014a3.12 3.12 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.551zm10.844 5.803l.201-.239-.254-.302-.254.302.202.239c.003.121.01 1.175-.612 1.551 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.551zm0 5.387l.201-.239-.254-.302-.254.302.202.239c.003.12.01 1.175-.612 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.552zm1.48-2.694l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.11 3.11 0 0 0 .178.589c.131-.314.179-.589.179-.589.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 0l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.09 3.09 0 0 0 .179.589c.131-.314.179-.589.179-.589.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm2.957 5.387l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.11 3.11 0 0 0 .178.588c.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 0l.202-.239-.255-.302-.254.302.201.239c.003.121.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm1.477-20.59l.201-.239-.254-.302-.254.302.202.239c.003.12.01 1.175-.612 1.552 0 0 .207.04.486-.014a3.12 3.12 0 0 0 .179.588c.13-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.615-1.431-.611-1.552zm1.48-2.693l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014 0 0 .048.275.178.588.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 
0l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.13 3.13 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm2.957 5.387l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.1 3.1 0 0 0 .178.588c.131-.314.179-.588.179-.588.278.054.485.014.485.014-.622-.377-.614-1.431-.611-1.552zm-2.957 0l.202-.239-.255-.302-.254.302.201.239c.003.12.011 1.175-.611 1.552 0 0 .207.04.486-.014a3.07 3.07 0 0 0 .179.588c.131-.314.179-.588.179-.588.278.054.486.014.486.014-.622-.377-.614-1.431-.611-1.552zm11.221 13.502c.144.19.346.314.512.314a1.06 1.06 0 0 0 .223-.034.43.43 0 0 1 .11-.016c.04 0 .057.015.057.052a.56.56 0 0 1-.043.155c-.026.062-.043.112-.049.148l-.033.173-.083.148-.115.107-.033.034v.074.037c0 .202-.06.316-.223.425-.013.008-.033.022-.058.041l-.017.067-.082.288a1.73 1.73 0 0 1-.157.231c-.1.131-.165.237-.206.338-.034.083-.06.107-.114.107-.081 0-.162-.102-.176-.222a.42.42 0 0 1-.041.05l-.098.066-.158.107-.131.099-.091.091c-.016.016-.033.025-.047.025-.042 0-.085-.071-.126-.207l-.082-.271c-.022-.073-.034-.163-.034-.267 0-.137.009-.185.058-.295-.165.017-.212.027-.297.058a.29.29 0 0 1-.072.016c-.027 0-.044-.019-.044-.051a.51.51 0 0 1 .033-.122l.108-.28a.7.7 0 0 1 .566-.416.72.72 0 0 1 .211.044.45.45 0 0 0 .125.026c.096 0 .172-.07.172-.158 0-.044-.029-.091-.066-.108-.014-.007-.113-.037-.297-.091-.139-.041-.208-.092-.364-.273-.113-.131-.158-.168-.199-.168-.032 0-.049.021-.049.058l.008.11.003.051c0 .229-.068.448-.234.75l-.232.42c-.11.2-.157.352-.157.504 0 .166.072.313.165.339l.124.032a.09.09 0 0 1 .041.026c.011.013.026.034.026.047s-.015.046-.042.076c-.015.016-.064.08-.149.189-.142.186-.232.253-.405.305-.1.242-.263.383-.554.479l-.033.057c-.098.174-.317.331-.456.331h-.032c-.018.151-.126.308-.289.421l-.19.139a.34.34 0 0 0-.041.075c-.037.082-.072.116-.118.116-.088 
0-.161-.082-.212-.24l-.075.049-.157.107c-.038.018-.135.047-.289.083-.203.047-.378.133-.422.207l-.074.123-.034.014c-.04 0-.087-.057-.107-.13a2.03 2.03 0 0 1-.075-.464c0-.293.097-.484.397-.783l-.001-.071c0-.456.348-.765.86-.765l.083.002.281.017.026.001c.152 0 .292-.058.444-.183.202-.165.261-.266.261-.451 0-.059-.016-.126-.079-.333a1.34 1.34 0 0 1-.061-.388c0-.341.25-.765.59-.999.087-.061.152-.141.152-.187 0-.019-.01-.034-.023-.034-.022 0-.059.023-.105.064-.04.036-.107.078-.198.125l-.132.066-.149.058c-.046.017-.069.042-.069.073 0 .018.007.044.019.075s.016.047.016.061-.014.052-.042.096l-.115.19c-.113.186-.195.266-.339.33-.093.272-.226.41-.421.438-.156.314-.294.423-.57.454-.038.121-.105.221-.248.38-.059.064-.095.109-.107.132-.065.116-.07.123-.1.123-.038 0-.124-.057-.189-.123a.5.5 0 0 1-.074-.1l-.075-.132c-.024-.043-.049-.062-.08-.062l-.053.003-.182.025a.58.58 0 0 0-.331.19l-.099.149c-.01.015-.025.024-.039.024-.074 0-.119-.163-.119-.433 0-.237.036-.395.118-.508l.231-.322c.032-.045.05-.089.05-.126a.17.17 0 0 0-.041-.106c-.049-.058-.096-.093-.132-.099l-.174-.024c-.031-.005-.044-.012-.044-.026s.009-.034.027-.057a.79.79 0 0 1 .207-.182c.205-.125.339-.174.477-.174a.81.81 0 0 1 .432.142l.239.157c.034.023.074.034.115.034.111 0 .194-.066.272-.215.076-.144.109-.26.109-.383a.51.51 0 0 0-.53-.534c-.143 0-.29.056-.43.165-.042.033-.07.064-.083.091l-.066.149c-.015.036-.039.057-.059.057-.042 0-.135-.119-.213-.272-.041-.08-.08-.179-.115-.298-.049-.161-.071-.282-.071-.412l.012-.224a.83.83 0 0 1-.149-.511.92.92 0 0 1 .059-.29.61.61 0 0 1-.091-.368l.008-.103.025-.115.017-.099c-.08-.104-.102-.174-.182-.536-.044-.205-.071-.294-.106-.347-.017-.023-.025-.041-.025-.05 0-.048.115-.092.247-.092l.116.009c-.029-.076-.035-.106-.035-.166a1.03 1.03 0 0 1 .06-.329c.044-.126.053-.161.053-.202 0-.098-.015-.197-.036-.252l-.066-.165a.23.23 0 0 1-.021-.085c0-.017.011-.023.041-.023.162 0 .472.175.715.405s.32.413.32.752l-.014.181c.225.197.323.36.323.537 0 .129-.042.215-.166.337-.223.222-.273.337-.273.622 0 
.438.131.675.373.675.213 0 .377-.231.404-.57l.025-.314a3.22 3.22 0 0 1 .041-.297c.014-.075.025-.174.025-.215 0-.111-.073-.252-.29-.504-.232-.271-.31-.417-.31-.629 0-.254.176-.44.414-.44.059 0 .089.009.16.045.003-.144.01-.17.091-.372.064-.159.133-.272.165-.272.011 0 .028.012.049.033a.86.86 0 0 0 .108.09l.083.058a.75.75 0 0 1 .074.075c.051-.185.104-.272.256-.421.116-.113.159-.148.181-.148s.038.015.059.041l.099.131.14.158c.054.059.073.111.099.255l.082-.049.132-.074a.97.97 0 0 0 .1-.066c.022-.016.041-.026.052-.026s.029.024.039.059l.049.19.108.215c.032.064.042.114.042.212v.052l.055-.002c.163 0 .303.192.303.414 0 .294-.235.674-.54.876-.028.099-.035.139-.035.219 0 .657.173.794 1.183.945a5.56 5.56 0 0 1 .677.14l.653.215c.223.074.373.107.489.107.123 0 .191-.035.191-.1 0-.113-.165-.214-.672-.412l-.719-.28c-.731-.286-1.076-.731-1.076-1.388 0-.764.574-1.338 1.34-1.338.289 0 .57.06.976.207l.605.198c.101 0 .156-.069.171-.214.013-.124.047-.174.115-.174.101 0 .212.094.29.248.068.134.107.291.107.428a.47.47 0 0 1-.405.504.8.8 0 0 1-.665.342c-.329 0-.492-.065-1.054-.416-.201-.126-.287-.157-.434-.157-.197 0-.343.122-.343.287 0 .151.122.251.397.324.164.043.188.055.273.14.066-.016.096-.02.137-.02.268 0 .599.145.879.384.509.157.962.706.962 1.167 0 .509-.441.965-.935.965a3.37 3.37 0 0 1-.639-.102z" data-v-7c4b1471></path></g><g class="C" data-v-7c4b1471><path d="M22.437 23.219l.033-.091.033-.116c.006-.021.037-.068.091-.139.084-.11.126-.249.126-.411 0-.051-.013-.098-.044-.16-.016-.032-.025-.058-.025-.076 0-.011.006-.024.017-.038s.022-.023.033-.025l.005-.001c.021 0 .057.024.085.058.035.043.066.066.084.066.037 0 .076-.063.098-.157l.017-.083c0-.04-.033-.144-.074-.231-.016-.033-.024-.061-.024-.084 0-.029.019-.053.046-.053s.063.026.11.07c.023.022.045.034.063.034.052 0 .089-.053.128-.182l.033-.129c0-.024-.01-.038-.029-.038l-.029.002-.098.01c-.151 0-.364-.104-.464-.224l-.231-.281-.206-.273-.19-.124c-.031-.02-.049-.045-.049-.067 0-.031.031-.052.079-.052s.179.048.425.152a2.58 2.58 0 0 0 
.837.183c.395 0 .721-.319.721-.707 0-.212-.095-.408-.327-.681-.043-.051-.118-.101-.153-.101-.027 0-.046.02-.046.049a.26.26 0 0 0 .025.093c.01.026.016.052.016.076 0 .038-.014.06-.039.06-.052 0-.106-.085-.151-.234-.055-.184-.162-.294-.414-.421a.89.89 0 0 0-.261-.093c-.016 0-.027.014-.027.036 0 .031.012.065.033.098a.41.41 0 0 1 .067.158v.008c0 .032-.016.05-.046.05-.053 0-.065-.014-.186-.199-.062-.095-.228-.22-.372-.281-.045-.019-.093-.031-.14-.033h-.005c-.03 0-.053.014-.053.032 0 .007.013.033.037.076l.012.047c0 .025-.017.045-.039.045-.05 0-.203-.126-.324-.265-.131-.151-.215-.343-.215-.494 0-.316.302-.596.64-.596.085 0 .157.015.212.042l.289.149.339.224c.195.127.462.223.624.223.06 0 .106-.008.26-.051.081-.022.131-.054.131-.084l-.003-.007-.029-.017-.182-.016c-.101-.01-.33-.113-.512-.232l-.38-.247-.338-.141c-.026-.01-.042-.027-.042-.043s.018-.031.039-.031c0 0 .032.006.094.017l.314.091c.079.022.206.081.38.173.344.183.569.264.734.264.191 0 .331-.131.331-.309 0-.06-.013-.157-.032-.236a.33.33 0 0 0-.05-.107c-.014-.021-.03-.032-.041-.032-.024 0-.037.025-.037.067l.004.057.003.048c0 .152-.059.206-.228.206-.137 0-.322-.055-.742-.222-.394-.155-.64-.214-.884-.214-.622 0-1.167.507-1.167 1.085 0 .499.332.882 1.011 1.161l.702.289c.581.239.794.41.794.636 0 .188-.175.309-.448.309-.118 0-.265-.032-.536-.119-.657-.209-1.085-.296-1.461-.296-.613 0-1.049.168-1.398.536-.046.049-.073.063-.114.063-.028 0-.056-.011-.118-.047a.4.4 0 0 0-.181-.059c-.06 0-.154.052-.198.109l-.082.107c-.024.031-.051.05-.073.05-.06 0-.166-.102-.232-.224-.041-.074-.069-.147-.082-.215-.011-.052-.017-.137-.017-.226l.008-.153.025-.165.025-.141c.006-.02.022-.033.04-.033.043 0 .062.039.062.129l-.002.052-.002.037c0 .235.093.45.196.45.081 0 .152-.144.211-.43.012-.057.035-.09.063-.09s.053.042.078.165c.015.08.068.191.124.264.032.042.066.067.094.067.068 0 .111-.101.145-.339.014-.092.041-.15.073-.15.014 0 .026.007.035.018a1.3 1.3 0 0 1 .058.091c.02.034.052.062.091.082s.074.033.096.033a.12.12 0 0 0 
.077-.033c.037-.033.058-.058.058-.068s-.017-.038-.049-.073c-.054-.058-.096-.136-.149-.273L20 19.462c-.013-.03-.027-.05-.042-.058l-.026-.007-.049.007-.173.049-.054.009c-.023 0-.039-.015-.039-.039s.03-.062.068-.092c.209-.166.333-.445.38-.851.028-.247.06-.34.165-.487.022-.031.041-.049.052-.049.032 0 .05.025.05.073 0 .032-.009.072-.027.117a.29.29 0 0 0-.018.1l.001.031.017.05.091-.033.099-.115a.61.61 0 0 0 .109-.338c0-.079-.019-.134-.066-.19-.052-.062-.088-.087-.126-.087l-.056.004-.059.004c-.053 0-.08-.016-.08-.049l.014-.063.009-.091-.049-.206-.041-.1-.025-.083-.058.041-.124.074c-.032.019-.066.05-.1.091-.117.142-.125.149-.172.149-.03 0-.047-.011-.047-.031l.005-.035a.85.85 0 0 0 .025-.171c0-.144-.105-.361-.231-.481-.173.165-.265.346-.265.521v.099c0 .038-.012.057-.038.057h-.003c-.021-.003-.041-.019-.058-.041l-.099-.132a1.33 1.33 0 0 0-.133-.108l-.066-.057-.033.066c-.082.164-.11.262-.115.397-.004.11-.013.142-.039.142-.013 0-.03-.006-.052-.019-.125-.07-.137-.075-.191-.075-.149 0-.25.106-.25.263 0 .061.035.154.103.275.076.135.189.258.239.258.014 0 .017-.01.017-.051v-.017l-.008-.14-.017-.108-.001-.014c0-.032.012-.046.037-.046.075 0 .14.219.195.655.048.384.154.648.322.801.043.039.067.071.067.089s-.015.026-.033.026c-.024 0-.062-.015-.108-.041a.18.18 0 0 0-.09-.026c-.083 0-.175.096-.191.2l-.075.47-.058.661c-.011.125.006.293.05.495.038.178.058.309.058.391 0 .495-.308.886-.818 1.037l-.529.156c-.329.098-.491.315-.537.718.085-.067.111-.078.24-.098l.156-.025a.43.43 0 0 0 .141-.041c.015-.009.045-.04.091-.091l.14-.156c.053-.06.101-.094.13-.094.02 0 .035.022.035.048s-.02.078-.058.136-.06.135-.06.213c0 .105.028.172.135.324.013-.05.024-.083.033-.099a1.69 1.69 0 0 1 
.124-.173c.114-.148.197-.302.198-.372.002-.128.014-.169.046-.169s.033.006.045.079c.003.018.012.035.025.05.019.021.038.033.05.033s.033-.012.066-.033c.057-.038.101-.077.132-.116s.062-.093.082-.149c.028-.072.042-.125.042-.156l-.001-.107c0-.028.012-.051.027-.051s.062.043.089.091c.018.032.03.042.053.042h.005c.03-.003.056-.015.074-.034.101-.098.166-.219.166-.31l-.009-.152-.033-.125-.002-.011c0-.026.019-.048.04-.048.037 0 .078.053.135.174.026.057.051.084.075.084.039 0 .07-.029.148-.142l.074-.107c.016-.024.025-.052.025-.083s-.009-.057-.025-.081l-.067-.099c-.011-.017-.016-.045-.016-.083a.81.81 0 0 1 .124-.388l.107-.207c.013-.034.027-.05.046-.05s.028.02.028.041l-.008.091v.066.066c.011.03.031.042.071.042.025 0 .054-.006.086-.017l.206-.075a1.42 1.42 0 0 0 .19-.091c.324-.179.424-.216.583-.216.095 0 .218.037.392.117.036.017.065.026.085.026s.05-.015.072-.042c.016-.019.024-.042.024-.063 0-.03-.011-.066-.033-.102-.026-.046-.051-.103-.051-.123s.014-.036.038-.036.047.02.062.044l.19.33c.154.274.354.464.529.504l.363.083c.113.025.141.069.141.209a.72.72 0 0 1-.331.616l-.298.191c-.108.068-.198.259-.198.416 0 .102.025.282.058.425l.058-.057.141-.083.124-.124.066-.182.066-.165c.032-.08.07-.132.096-.132s.045.045.045.126l-.001.023v.031c0 .118.015.17.107.348z" data-v-7c4b1471></path><path d="M17.543 25.575c0-.393.143-.564.712-.853 1.123-.569 1.728-1.091 1.728-1.488 0-.08-.026-.197-.1-.445a1.41 1.41 0 0 1-.071-.358c0-.183.151-.46.344-.632l.157-.14c.086-.077.15-.188.15-.261l-.001-.037-.002-.049c0-.082.032-.112.117-.116l.075-.008.064-.017c.027 0 .053.015.069.041l.041.066.058.066c.019.021.038.034.053.034s.033-.008.063-.024c.04-.022.071-.031.1-.031.043 0 .065.023.065.071 0 .166-.107.406-.298.661-.317.429-.421.66-.421.934 0 .215.062.404.182.56.021.028.033.05.033.061 0 .022-.021.059-.058.104l-.05.067c-.066.095-.11.132-.153.132-.032 0-.063-.027-.078-.067l-.033-.09c-.01-.026-.028-.043-.047-.043-.006 0-.02.014-.036.034-.021.027-.032.059-.032.091v.14c0 .056-.02.114-.058.165a.69.69 0 0 
1-.116.123c-.031.027-.054.042-.065.042-.024 0-.047-.026-.059-.066l-.05-.165c-.007-.024-.024-.041-.041-.041-.028 0-.051.046-.058.116-.017.187-.049.301-.098.347l-.116.106c-.037.036-.081.061-.105.061s-.045-.011-.052-.027l-.058-.124c-.007-.015-.021-.025-.037-.025-.033 0-.058.066-.07.191-.03.297-.092.43-.256.545a.45.45 0 0 0-.206.231c-.072-.106-.1-.196-.1-.329a.3.3 0 0 1 .034-.157c.034-.062.053-.109.053-.137 0-.02-.013-.036-.03-.036-.008 0-.022.012-.041.032-.023.027-.059.055-.107.083-.149.085-.171.116-.207.281a.42.42 0 0 1-.38.371c-.28.057-.297.064-.397.158l-.01-.147zm-.534-8.514v.083l.001.122a.51.51 0 0 1-.018.134c-.027.115-.033.154-.033.187 0 .158.022.236.115.399.052.09.085.199.085.277 0 .033-.014.053-.036.053-.014 0-.029-.008-.04-.024l-.066-.091c-.053-.072-.103-.091-.237-.091l-.143.009a1.79 1.79 0 0 1 .099.289c.067.27.076.289.223.421s.161.15.161.198c0 .037-.026.067-.057.067l-.055-.008-.033-.003c-.045 0-.086.083-.086.175a.59.59 0 0 0 .053.208c.02.049.04.082.058.1l.083.082a.12.12 0 0 1 .033.085c0 .04-.023.074-.05.074a.19.19 0 0 1-.074-.027c-.021-.01-.04-.016-.058-.016-.037 0-.055.023-.055.068l.013.123.05.214c.011.048.042.102.091.158a.39.39 0 0 0 .124.099l.166.075c.016.012.024.036.024.066s-.024.058-.055.058a.7.7 0 0 1-.086-.017l-.084-.008c-.084 0-.09.005-.09.069a.52.52 0 0 0 .017.128l.033.132c.035.14.08.215.13.215.011 0 .032-.012.06-.033a1.02 1.02 0 0 1 .511-.173c.155 0 .332.019.399.041.038.013.065.02.079.02.02 0 .027-.02.028-.094l.017-.149.033-.256.024-.19v-.108l-.067.058c-.14.132-.229.173-.374.173-.38 0-.598-.427-.598-1.171 0-.307.045-.742.13-1.24.018-.105.026-.178.026-.229 0-.272-.172-.537-.472-.729z" data-v-7c4b1471></path><path d="M17.57 18.91l.074-.074.074-.067c.062-.054.111-.169.111-.254 0-.039-.008-.075-.028-.118-.029-.062-.084-.117-.117-.117-.028 0-.053.068-.064.175l-.017.124-.024.124-.009.124v.082zm-.726 3.377c.085.031.102.045.19.149.115.133.178.165.33.165a.42.42 0 0 0 .117-.016l.107-.033.083-.025c.04-.011.066-.028.066-.042s-.065-.072-.165-.139a1.59 1.59 0 0 
0-.198-.116c-.071-.035-.131-.051-.189-.051-.116 0-.228.036-.339.109zm1.113 2.323c0-.266.262-.485.582-.485.16 0 .253.032.253.086 0 .042-.07.1-.223.181l-.24.132-.364.181-.008-.096zm3.363-2.003c.117-.024.157-.054.215-.057l.149.008a.48.48 0 0 0 .099-.066l.116-.082c.165-.113.174-.12.174-.156 0-.046-.067-.068-.206-.068-.26 0-.419.122-.546.422z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M18.827 17.58l.116.074c.066.042.121.073.165.09.128.053.174.071.174.12a.35.35 0 0 1-.017.094.51.51 0 0 0-.025.116c-.012.112-.08.141-.125.141-.056 0-.114-.086-.206-.248a1.91 1.91 0 0 1-.132-.33l-.005-.032c0-.02.012-.032.036-.032l.018.006z" data-v-7c4b1471></path><path d="M19.942 17.868l-.19.191c-.067.089-.067.167-.083.182s-.032.021-.058.025l-.012.001c-.048 0-.084-.035-.087-.083l-.008-.14-.009-.141v-.008c0-.054.03-.083.073-.092l.176-.049.181-.091c.053-.026.069-.033.078-.033.021 0 .039.022.039.048 0 .012-.006.03-.018.052l-.083.139z" data-v-7c4b1471></path><path d="M19.105 18.431l.124.092a.22.22 0 0 0 .111.028.51.51 0 0 0 .195-.053l.108-.049c.025-.011.047-.018.064-.018.066 0 .118.07.118.157 0 .122-.088.247-.216.429l-.09.191c-.072.103-.12.14-.179.14-.098 0-.146-.056-.293-.348-.106-.208-.149-.336-.149-.438 0-.087.045-.148.111-.148.027 0 .06-.006.096.016z" data-v-7c4b1471></path></g><path d="M19.216 18.884l.008.157c.004.071.063.142.12.142.076 0 .131-.115.16-.29l.019-.115c0-.061-.02-.077-.098-.077l-.044.002h-.058l-.037-.002c-.052 0-.072.04-.072.145l.002.038z" class="C" data-v-7c4b1471></path><path d="M8.359 21.057c.072.182.097.272.107.389l.016.346c.013.347.128.52.347.52.079 0 .145-.021.182-.058l.091-.091.036-.011c.058 0 .09.038.09.107l-.01.134c-.007.06.004.188.033.379a2.25 2.25 0 0 1 .025.333c0 .176-.013.244-.075.402.058.162.091.345.091.499 0 .18-.029.3-.099.402.033.142.041.199.041.28s-.011.187-.033.314c-.029.172-.041.305-.033.395l.001.021c0 .052-.016.078-.048.078s-.069-.024-.102-.066c-.05-.065-.083-.088-.121-.088-.057 
0-.075.03-.16.278-.026.077-.093.18-.198.306-.082.098-.133.179-.149.24l-.049.173c-.01.022-.029.039-.05.041h-.01c-.136 0-.292-.292-.452-.842-.045-.156-.103-.225-.187-.225-.06 0-.121.04-.144.093l-.05.116c-.009.02-.026.033-.043.033-.066 0-.167-.121-.229-.273-.051-.126-.081-.263-.081-.38 0-.259.191-.452.766-.776.251-.141.348-.32.348-.643 0-.256-.077-.403-.297-.57-.175-.132-.275-.182-.368-.182-.078 0-.14.052-.14.118 0 .078.045.125.153.162.127.046.182.09.182.147s-.054.165-.141.299a.76.76 0 0 1-.264.273c-.039.227-.146.372-.397.537-.05.142-.078.193-.165.305l-.174.223a.72.72 0 0 0-.124.256c-.018.126-.03.148-.078.148-.066 0-.2-.116-.335-.289a.4.4 0 0 1-.107-.273c-.086.068-.113.077-.281.099-.101.014-.176.033-.223.059-.127.066-.168.084-.191.084-.034 0-.046-.018-.049-.076l-.008-.182c0-.249.098-.54.248-.734-.145-.125-.289-.202-.405-.215-.049-.005-.062-.011-.062-.029 0-.03.032-.095.078-.152.062-.078.111-.133.149-.165a.82.82 0 0 1 .479-.174c.107 0 .266.039.471.116.074.028.14.041.197.041.091 0 .142-.059.142-.163 0-.15-.074-.385-.241-.762-.112-.255-.163-.427-.163-.545 0-.286.135-.583.304-.668-.08-.04-.115-.05-.184-.05-.083 0-.148.021-.254.083-.122.071-.19.134-.19.176 0 .016.025.042.067.072.064.045.075.061.075.122 0 .204-.169.477-.405.654-.052.289-.185.464-.446.586-.034.157-.056.203-.182.38-.216.302-.266.399-.305.57-.02.089-.041.125-.068.125-.035 0-.108-.055-.188-.142l-.132-.158c-.035-.054-.053-.107-.091-.263-.06.017-.076.02-.118.02l-.121-.003-.083-.002c-.181 0-.373.041-.471.101-.036.022-.06.033-.072.033-.053 0-.097-.112-.097-.25 0-.298.083-.545.276-.814-.071-.059-.094-.081-.157-.148-.027-.03-.05-.05-.066-.058l-.091-.05c-.031-.017-.05-.037-.05-.054 0-.044.121-.159.264-.252.159-.102.285-.149.4-.149.135 0 .207.035.376.182.117.102.197.149.254.149.04 0 .084-.028.118-.075.028-.038.043-.08.043-.123 0-.158-.189-.424-.398-.562-.156-.102-.326-.157-.492-.157-.23 0-.428.114-.515.297l-.091.19c-.021.044-.048.067-.079.067-.053 0-.081-.024-.268-.241l-.214-.248a.66.66 0 0 1-.133-.214.49.49 0 0 
0-.041-.107c-.007-.011-.036-.033-.082-.067-.166-.117-.287-.349-.356-.685l-.025-.107c-.202-.248-.323-.555-.323-.821a.36.36 0 0 1 .018-.113c-.118-.118-.131-.147-.224-.463-.066-.224-.108-.32-.19-.428-.022-.029-.033-.052-.033-.064 0-.045.069-.081.19-.101l.166-.019.165.011-.01-.226c0-.133.01-.219.044-.377a1.01 1.01 0 0 0 .025-.172 1.68 1.68 0 0 0-.017-.142.73.73 0 0 0-.024-.124c-.012-.031-.018-.055-.018-.07 0-.027.018-.041.049-.041.024 0 .058.012.109.036.144.071.263.143.355.215.285.223.509.566.509.779l-.004.065-.017.121c0 .036.02.084.058.134l.091.125c.081.11.133.238.133.324 0 .077-.016.111-.149.304-.1.144-.141.252-.141.369 0 .149.059.337.166.522.097.171.21.265.317.265.184 0 .265-.163.294-.604l.066-.627.008-.113c0-.052-.016-.082-.082-.161l-.249-.288c-.236-.276-.307-.418-.307-.616 0-.227.149-.384.365-.384l.099.009-.001-.064a.87.87 0 0 1 .042-.224c.052-.177.117-.314.151-.314.011 0 .038.013.08.041a1.22 1.22 0 0 0 .149.057.99.99 0 0 1 .164.082.73.73 0 0 1 .133-.289c.107-.138.228-.247.271-.247.026 0 .074.035.134.099l.157.165c.026.027.057.081.091.157l.033.074.099-.057.132-.075.157-.075c.042-.028.072-.043.085-.043.028 0 .043.05.105.323a1.9 1.9 0 0 1 .058.364c.264.037.388.188.388.471 0 .266-.131.531-.413.832-.077.083-.096.122-.096.199 0 .73.254.96 1.17 1.057a5.83 5.83 0 0 1 1.404.298c.355.106.559.15.691.15.156 0 .243-.057.243-.157 0-.153-.111-.201-.769-.34-1.531-.322-2.132-.845-2.132-1.854 0-.719.538-1.259 1.257-1.259.317 0 .6.097 1.048.355.416.241.502.282.597.282.111 0 .155-.071.155-.25 0-.116.035-.172.107-.172.066 0 .166.068.256.174.137.162.19.245.19.469 0 .299-.159.494-.471.58l-.074.082c-.17.192-.32.256-.59.256-.235 0-.53-.096-.732-.24l-.397-.28c-.171-.121-.296-.173-.416-.173-.169 0-.27.101-.27.27s.11.301.29.357l.372.116c.132.041.194.067.355.148l.185-.018a1.57 1.57 0 0 1 .699.174l.149.067.207.058c.469.131.851.656.851 1.168 0 .584-.379.946-.991.946l-.191-.009z" class="B" data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M8.218 25.961l.049-.1.116-.124a.61.61 0 0 0 
.15-.427c0-.085-.015-.177-.043-.267a.67.67 0 0 1-.034-.146c0-.052.039-.102.079-.102.016 0 .039.015.062.042l.124.14c.077.087.103.103.199.124l.008-.115.008-.136a.96.96 0 0 0-.182-.558c-.066-.084-.096-.135-.096-.164s.022-.059.045-.059c.008 0 .02.006.034.017.08.059.121.084.143.084.04 0 .076-.1.076-.208a.77.77 0 0 0-.169-.461l-.099-.123c-.011-.016-.017-.032-.017-.047 0-.035.029-.061.072-.061.031 0 .05.008.102.041.024.016.047.025.063.025.064 0 .088-.09.088-.336l-.003-.102-.008-.149c-.009-.175-.025-.206-.099-.206H8.82c-.403 0-.591-.255-.695-.942-.075-.496-.26-.849-.487-.925-.136-.045-.184-.075-.184-.111 0-.023.038-.045.079-.045.03 0 .099.018.156.041.329.133.607.198.849.198.396 0 .708-.291.708-.66 0-.282-.134-.569-.376-.801a.47.47 0 0 0-.206-.124l-.016-.001-.042.014.017.119.032.083.009.034c0 .018-.017.032-.039.032s-.047-.013-.061-.034l-.149-.223c-.118-.176-.376-.323-.568-.323-.068 0-.111.03-.111.078 0 .023.022.066.05.097.042.048.075.105.075.132l-.009.033c-.009.019-.021.031-.033.033l-.008.001c-.04 0-.06-.018-.24-.223-.147-.168-.448-.332-.609-.332-.023 0-.044.01-.061.026-.01.011-.017.023-.017.032 0 .025.022.058.058.092.051.047.092.103.092.127 0 .016-.022.031-.046.031-.047 0-.153-.068-.351-.224-.308-.242-.446-.47-.446-.735 0-.319.228-.57.52-.57.171 0 .398.082.554.199l.405.305a1.07 1.07 0 0 0 .65.215c.052 0 .1-.012.143-.033.036-.018.058-.04.058-.055s-.031-.024-.075-.036c-.146-.035-.264-.085-.347-.148l-.413-.313-.323-.223c-.218-.114-.26-.141-.26-.167 0-.013.021-.024.047-.024s.047.006.073.017l.215.091a2.47 2.47 0 0 1 .347.206c.442.295.667.39.927.39.247 0 .429-.175.429-.413 0-.047-.012-.098-.034-.143l-.041-.082c-.014-.029-.041-.052-.049-.041l-.025.033-.025.174c-.001.007-.01.027-.025.058a.29.29 0 0 1-.057.082c-.045.044-.083.058-.152.058-.128 0-.271-.068-.658-.315s-.744-.379-1.024-.379A1.07 1.07 0 0 0 5.977 18c0 .827.505 1.2 2.09 1.546.603.131.868.308.868.576 0 .245-.198.399-.512.399a1.19 1.19 0 0 1-.224-.016 30.59 30.59 0 0 1-.851-.232c-.354-.098-.752-.154-1.103-.154-.622 
0-1.041.185-1.326.584-.033.047-.066.075-.088.075s-.042-.012-.069-.033l-.115-.091a.56.56 0 0 0-.297-.113.26.26 0 0 0-.175.072c-.041.037-.072.057-.084.057-.046 0-.131-.104-.196-.239-.093-.193-.141-.386-.141-.562 0-.082.026-.14.062-.14.029 0 .05.022.062.066l.066.239c.037.134.112.239.169.239.025 0 .07-.036.12-.099.031-.038.053-.077.067-.116.052-.151.064-.173.105-.173.048 0 .073.03.106.181.007.028.016.07.054.142.033.062.067.113.099.149.025.027.056.042.085.042.073 0 .139-.121.139-.255l-.002-.035-.01-.153c0-.059.009-.077.043-.077.019 0 .034.01.058.04.035.046.083.085.141.115.05.027.091.041.118.041s.053-.015.08-.041.035-.044.035-.065-.03-.062-.117-.15c-.047-.048-.103-.16-.165-.33-.023-.065-.054-.109-.078-.109a.24.24 0 0 0-.054.018l-.107.041-.074.033-.028.006c-.035 0-.063-.025-.063-.057s.075-.121.166-.196c.065-.055.105-.136.107-.223.005-.139.031-.248.058-.248l.025-.289a2.98 2.98 0 0 1 .124-.569c.01-.03.023-.051.033-.051.023 0 .044.062.044.131l-.003.043-.005.084c0 .105.017.159.049.159s.077-.052.113-.143l.074-.19c.022-.056.033-.108.033-.154 0-.161-.096-.277-.23-.277l-.06.01-.058.017c-.043 0-.08-.048-.08-.104l.006-.119-.008-.165-.033-.173-.009-.083-.074.041-.091.075-.132.107-.108.091c-.019.016-.04.025-.061.025-.063 0-.071-.017-.071-.146v-.032c0-.104-.045-.185-.214-.384l-.042-.05-.032.058-.091.149-.091.231-.017.148c-.004.043-.021.076-.037.076s-.046-.022-.078-.059a.64.64 0 0 0-.355-.239l-.016.057-.05.174-.009.156v.066.053c0 .046-.017.073-.045.073l-.005-.001c-.003-.001-.019-.015-.049-.041-.019-.017-.054-.026-.1-.026-.154 0-.252.083-.252.215 0 .099.034.173.186.404.068.103.146.184.176.184.02 0 .037-.054.037-.117l-.015-.099a.41.41 0 0 1-.024-.126c0-.046.013-.071.039-.071.074 0 .151.214.183.512l.033.305a1.02 1.02 0 0 0 .099.33c.075.154.149.259.215.305.079.056.128.111.128.141s-.021.05-.044.05a5.64 5.64 0 0 1-.167-.059l-.069-.01c-.165 0-.22.094-.22.378a1.58 1.58 0 0 0 .488 1.044l.38.306c.193.154.339.411.339.595 0 .242-.265.479-.719.644l-.273.098c-.212.077-.297.214-.38.611a.68.68 0 0 1 
.228-.042l.037.001.123.003c.111 0 .191-.067.191-.16v-.075c0-.022.031-.053.074-.075l.115-.058.091-.074c.014-.012.03-.019.046-.019.025 0 .045.024.045.051 0 .02-.006.046-.017.075-.028.073-.044.173-.044.284 0 .202.032.311.135.459.009-.055.018-.09.025-.107.017-.042.064-.117.141-.223.125-.175.217-.388.217-.503l-.003-.035-.025-.189c0-.034.015-.067.041-.091.018-.016.038-.024.057-.024.035 0 .057.021.085.082.019.046.043.074.059.074.029 0 .086-.076.122-.165.028-.066.041-.125.041-.175a.29.29 0 0 0-.025-.106.29.29 0 0 1-.026-.094c0-.021.007-.031.034-.055.014-.01.028-.017.042-.017s.033.007.049.017l.074.05c.025.017.048.025.066.025.033 0 .081-.037.116-.092.072-.11.117-.201.117-.235 0-.022-.014-.038-.059-.07-.164-.116-.273-.292-.273-.442 0-.035.008-.074.025-.119l.082-.223.066-.174.024-.009c.028 0 .045.023.045.064a.28.28 0 0 1-.019.094.54.54 0 0 0-.035.157c0 .059.03.091.086.091a.35.35 0 0 0 .138-.033c.25-.108.448-.166.567-.166.112 0 .182.025.3.108.031.022.058.034.076.034s.032-.013.048-.034c.01-.014.017-.029.017-.041 0-.03-.018-.069-.05-.107-.042-.052-.059-.085-.059-.117 0-.026.017-.043.044-.043.13 0 .333.41.527 1.059.152.513.195.565.685.843.415.234.573.466.573.843 0 .371-.103.846-.317 1.469-.067.197-.097.33-.097.44 0 .087.017.172.081.394l.057.19a.98.98 0 0 0 .065.124zm-1.916-1.156a1.38 1.38 0 0 1 .157-.281c.134-.201.182-.308.182-.404v-.133c0-.124.022-.182.072-.182a.05.05 0 0 1 .044.025l.042.066c.013.021.035.033.058.033s.069-.032.093-.069a.58.58 0 0 0 .096-.295l-.009-.091-.008-.093c0-.05.02-.072.068-.072.032 0 .046.013.073.075.017.04.042.066.062.066s.049-.018.069-.05l.066-.099.05-.075.012-.04c0-.023-.018-.044-.062-.076-.153-.11-.22-.28-.231-.587a2.62 2.62 0 0 0-.041-.454c-.005-.016-.054-.134-.149-.355a2.15 2.15 0 0 1-.099-.297c-.019-.076-.043-.1-.103-.1-.225 0-.394.192-.394.445 0 .149.062.384.175.67.159.401.201.544.201.686 0 
.303-.121.435-.54.586-.473.169-.695.413-.777.85l.083-.033.115-.041.132-.033c.114-.028.205-.092.223-.157l.033-.115c.008-.028.024-.047.05-.058l.074-.033.074-.033.016.033-.033.107a.74.74 0 0 0-.017.192c0 .181.036.287.141.418zm-4.437-7.639l.017.093c0 .026-.011.097-.033.213l-.015.168a1.35 1.35 0 0 0 .072.435c.042.119.053.156.053.188l-.003.035c-.008.034-.02.059-.032.059s-.046-.018-.084-.052c-.095-.082-.198-.125-.305-.125a.81.81 0 0 0-.141.017l.041.091.124.306.174.264.181.125a.2.2 0 0 1 .05.049c.017.023.025.042.025.055s-.014.027-.033.028l-.166.008c-.025.001-.045.017-.045.035 0 .028.024.111.071.246l.083.24.14.132.148.108c.044.031.07.067.07.093s-.014.046-.029.047l-.082.008c-.027.003-.042.026-.042.066 0 .062.058.174.166.322.128.174.218.232.364.232h.124c.059 0 .095.029.095.076 0 .039-.025.061-.079.073-.114.024-.149.047-.149.1 0 .021.015.053.042.09l.067.091c.055.075.117.125.157.125.022 0 .074-.023.121-.076.074-.085.159-.154.268-.181.287-.073.347-.103.347-.169 0-.023-.011-.056-.033-.095-.091-.164-.124-.27-.124-.404 0-.103-.027-.175-.064-.175-.015 0-.041.025-.068.067-.068.104-.184.174-.289.174-.286 0-.631-.448-.801-1.041a1.65 1.65 0 0 1-.061-.445 2.32 2.32 0 0 1 .061-.504l.091-.405.011-.112c0-.205-.189-.468-.481-.673z" data-v-7c4b1471></path><path d="M2.46 18.315c-.061.115-.117.374-.117.538 0 .096.031.173.072.173.013 0 .03-.016.045-.041l.091-.166.059-.101c.033-.049.057-.097.057-.123 0-.098-.091-.219-.207-.28zm.907 3.723l.041.041.066.066c.161.167.185.183.263.183.072 0 .222-.041.307-.084.036-.018.058-.04.058-.057 0-.011-.03-.044-.082-.091l-.091-.083c-.069-.062-.171-.12-.22-.12-.095 0-.192.042-.342.145zm1.918 1.495a.82.82 0 0 1 .165.083l.132.082c.025.011.07.017.131.017.038 0 .072-.006.1-.017l.174-.066.165-.058c.044-.016.075-.041.075-.064 0-.012-.037-.03-.091-.043-.037-.01-.079-.023-.124-.041-.203-.082-.233-.094-.305-.094a.55.55 0 0 0-.422.201zm2.092 1.66c.055-.133.093-.155.281-.166.212-.012.414-.391.414-.648 
0-.025-.009-.047-.019-.047s-.03.01-.056.027l-.123.074-.24.107c-.16.071-.301.285-.301.453a.57.57 0 0 0 .045.2z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M3.807 18.018l.082.116a.78.78 0 0 0 .179.129c.025 0 .038-.018.053-.071l.033-.125.009-.138c0-.109-.048-.15-.273-.208-.056-.015-.079-.013-.115-.042s-.079-.05-.099-.05-.034.01-.034.023l.009.037.041.123.116.207zm.883-.001l-.108.191-.041.045-.008.001c-.044 0-.088-.1-.091-.204l-.016-.164c.013-.068.053-.098.156-.116.084-.015.14-.031.166-.049l.132-.091.014-.003c.022 0 .038.015.038.04l-.003.011-.058.141-.182.197z" data-v-7c4b1471></path><path d="M4.474 18.579l.132-.066c.024-.012.045-.018.063-.018.064 0 .112.048.112.134 0 .094-.074.221-.175.403-.109.197-.191.331-.276.331-.119 0-.287-.189-.451-.505a.54.54 0 0 1-.063-.23c0-.104.046-.159.133-.159.036 0 .07.008.095.026l.074.05a.39.39 0 0 0 .21.066c.052 0 .101-.011.145-.033z" data-v-7c4b1471></path></g><path d="M4.384 18.761s.076.016.076.067l-.002.024-.025.123c-.027.165-.065.267-.119.267a.08.08 0 0 1-.063-.035c-.085-.12-.15-.274-.15-.358 0-.059.026-.089.079-.089l.014.001.091.016.099-.016z" class="C" data-v-7c4b1471></path><path d="M23.617 5.468l.033.124.067.264c.104.472.191.595.421.595.131 0 .24-.073.273-.182l.049-.166c.011-.038.04-.06.079-.06.09 0 .138.099.145.299.002.076.019.211.049.404.023.139.033.267.033.382 0 .084-.01.133-.049.237l.016.099c.028.163.035.227.035.303 0 .354-.017.459-.101.597.034.135.044.199.044.296a2.47 2.47 0 0 1-.027.291l-.033.248.008.148c.022.055.033.089.033.099 0 .023-.015.041-.033.041l-.165-.066-.182-.099c-.059-.032-.1-.064-.19-.148-.16.141-.183.154-.373.206-.274.076-.375.11-.429.149s-.093.057-.114.057c-.064 0-.109-.124-.109-.306 0-.264.015-.401.058-.504-.161-.054-.242-.074-.3-.074a.69.69 0 0 0-.121.016l-.037.004c-.024 0-.038-.029-.038-.079 
0-.081.057-.242.133-.378.103-.184.274-.338.405-.364l.297-.057c.176-.034.281-.128.281-.251s-.077-.197-.257-.261l-.347-.124c-.117-.042-.227-.094-.331-.157l-.181-.108c-.03-.016-.062-.024-.09-.024-.073 0-.118.054-.118.141 0 .14.108.255.24.255.03 0 .058-.006.084-.017a.27.27 0 0 1 .076-.025c.038 0 .065.036.065.089a1.04 1.04 0 0 1-.19.522l-.099.124-.075.099c-.027.213-.236.517-.446.652-.051.032-.082.054-.091.066s-.029.047-.066.116c-.017.032-.073.098-.166.198a1.55 1.55 0 0 1-.206.19c-.092.07-.148.121-.165.149l-.083.141c-.017.029-.038.043-.065.043-.074 0-.142-.077-.183-.207-.03-.097-.057-.125-.12-.125a.64.64 0 0 0-.145.025l-.115.016h-.29c-.156 0-.365.038-.463.084-.037.018-.066.026-.085.026-.043 0-.064-.043-.064-.126 0-.262.202-.659.464-.916-.039-.152-.119-.254-.223-.28l-.149-.041c-.015-.008-.025-.023-.025-.039 0-.053.059-.105.19-.167.235-.111.303-.132.434-.132.15 0 .276.051.433.174.224.174.285.207.393.207.176 0 .345-.161.345-.331 0-.114-.063-.241-.218-.438-.22-.281-.298-.464-.298-.702 0-.16.032-.247.174-.478.022-.037.035-.068.035-.089 0-.045-.075-.086-.159-.086-.088 0-.198.032-.347.1-.249.114-.397.22-.397.285 0 .026.015.052.041.07l.083.058c.01.007.017.02.017.034s-.016.043-.034.064-.072.09-.165.215a1.89 1.89 0 0 1-.346.363c-.045.034-.072.062-.084.083l-.066.124c-.055.103-.218.274-.356.371l-.115.091a.75.75 0 0 0-.075.115c-.084.151-.217.338-.305.429l-.223.231c-.064.066-.154.257-.24.511-.068.2-.111.29-.142.29-.014 0-.037-.018-.064-.05-.16-.183-.305-.298-.375-.298-.018 0-.051.015-.096.042-.063.038-.138.067-.223.083l-.297.057c-.249.049-.348.102-.463.248-.029.036-.053.057-.066.057-.029 0-.051-.043-.051-.1l.008-.072c.002-.008.016-.082.042-.223.043-.235.103-.43.165-.537l.149-.256c.026-.046.041-.093.041-.133 0-.19-.178-.345-.447-.387-.055-.008-.083-.028-.083-.06 0-.038.065-.111.166-.188.175-.132.368-.215.505-.215.092 0 .215.032.379.099l.182.074a.49.49 0 0 0 .184.043c.192 0 .379-.175.609-.57.104-.18.149-.308.149-.425 0-.209-.097-.335-.26-.335-.128 
0-.25.054-.351.156-.09.091-.137.182-.137.264l.005.084.003.044c0 .067-.023.097-.072.097-.062 0-.135-.061-.278-.232l-.232-.272c-.251-.294-.303-.411-.314-.711-.28-.357-.35-.555-.35-.762l.011-.17c-.201-.241-.24-.334-.24-.572 0-.107.006-.135.05-.221-.099-.145-.13-.21-.157-.322l-.041-.174a.9.9 0 0 0-.132-.305c-.074-.106-.083-.122-.083-.153 0-.086.083-.142.29-.193l.066-.017-.017-.183c0-.048.006-.094.025-.197l.058-.306c.022-.119.034-.21.034-.271 0-.07-.011-.127-.05-.257-.025-.084-.054-.148-.082-.19s-.05-.084-.05-.109.028-.05.066-.05c.02 0 .055.01.083.027.279.152.584.393.801.636.187.208.247.381.289.843.253.188.348.358.348.626 0 .159-.052.313-.166.488l-.165.256c-.049.083-.075.179-.075.279 0 .149.045.289.157.488.123.219.234.314.365.314.217 0 .413-.374.413-.786l-.002-.081v-.223l.025-.198.016-.24.016-.199.006-.11c0-.081-.017-.105-.271-.376-.28-.3-.413-.554-.413-.789 0-.222.191-.408.42-.408a.6.6 0 0 1 .175.032v-.099l.009-.174.049-.214c.04-.171.058-.203.117-.203l.041.013.149.132.181.116.05.049c.012-.103.021-.124.107-.232l.115-.157.083-.124.05-.074c.033-.05.047-.063.069-.063s.038.029.064.121c.011.037.036.075.074.107l.108.091c.087.074.145.183.181.339a.49.49 0 0 1 .165-.149l.133-.074c.049-.028.079-.042.088-.042s.023.02.037.05c.019.043.041.085.066.124.106.171.116.196.116.311v.069.099c.268.074.372.218.372.514 0 .222-.102.451-.315.707-.141.17-.165.222-.165.35 0 .582.439.935 1.181.946l.545.008a2.67 2.67 0 0 1 .901.148c.382.125.507.158.623.158.084 0 .154-.047.154-.103 0-.119-.2-.212-.868-.401-1.361-.384-1.942-.935-1.942-1.836 0-.698.583-1.237 1.336-1.237a2.88 2.88 0 0 1 .82.141l.338.124a.52.52 0 0 0 .178.035c.117 0 .161-.073.178-.291.008-.097.039-.14.101-.14.087 0 .256.136.378.306s.215.404.215.554c0 .176-.092.31-.29.421-.185.282-.413.412-.72.412-.235 0-.635-.159-1.139-.454-.198-.116-.285-.148-.395-.148-.149 0-.244.083-.244.214 0 .119.088.194.334.28.391.139.475.185.587.323.421.073.604.146.868.346.754.242 1.174.707 1.174 1.303 0 .543-.368.935-.88.935a1.59 1.59 0 0 1-.402-.049z" class="B" 
data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M24.436 9.084a.58.58 0 0 1-.026-.152l.009-.112.026-.321c0-.061-.018-.109-.06-.166l-.099-.132c-.064-.087-.09-.151-.09-.225 0-.048.011-.064.044-.064l.038.008.074.041c.034.019.062.028.083.028.05 0 .075-.077.075-.232 0-.215-.04-.318-.165-.415l-.115-.091c-.02-.016-.034-.043-.034-.067 0-.04.023-.065.061-.065.023 0 .056.007.096.017l.091.025.035.008c.028 0 .041-.022.041-.07l-.002-.029-.025-.174c-.026-.18-.056-.249-.109-.249l-.015.002-.215.041-.036.003c-.258 0-.562-.245-.641-.515l-.157-.537c-.047-.161-.187-.338-.397-.504-.069-.053-.129-.085-.173-.09l-.148-.017c-.027-.003-.045-.019-.045-.04 0-.034.029-.05.09-.05a2.35 2.35 0 0 1 .732.14c.271.089.419.121.552.121.414 0 .663-.216.663-.573 0-.179-.065-.404-.157-.548-.081-.124-.216-.251-.389-.363-.025-.017-.048-.026-.065-.026-.023 0-.042.018-.042.042l.008.107v.01c0 .04-.023.073-.053.073-.046 0-.093-.054-.146-.165-.152-.323-.398-.517-.65-.517-.055 0-.087.019-.087.053 0 .025.015.063.043.109a1.02 1.02 0 0 1 .074.148c.013.032.02.056.02.071s-.013.024-.038.024c-.037 0-.098-.034-.139-.077l-.124-.132-.107-.166a.79.79 0 0 0-.33-.289l-.207-.107c-.041-.022-.078-.033-.105-.033s-.039.013-.039.038.013.059.037.095a.42.42 0 0 1 .076.165c0 .028-.018.05-.041.05-.058 0-.162-.104-.307-.305a.78.78 0 0 1-.158-.464c0-.348.268-.619.613-.619.177 0 .399.074.618.207l.397.24c.142.086.334.149.453.149.066 0 .085-.004.233-.042a.4.4 0 0 0 .107-.041.06.06 0 0 0 .03-.049c0-.032-.02-.047-.072-.05-.184-.014-.566-.164-.817-.323l-.273-.156c-.068-.024-.114-.059-.114-.088 0-.013.014-.02.036-.02l.048.008.36.132.347.124a2.18 2.18 0 0 0 .555.116c.157 0 .265-.108.265-.265 0-.079-.02-.168-.052-.23-.05-.099-.116-.182-.143-.182s-.05.033-.05.088l.004.044.01.106c0 .111-.061.184-.156.184a.46.46 0 0 1-.135-.026l-.537-.173c-.467-.151-.648-.19-.878-.19-.649 0-1.139.438-1.139 1.022 0 .658.452 1.071 1.612 1.472 1.125.39 1.272.479 1.272.775 0 .17-.16.307-.359.307a1 1 0 0 
1-.194-.017l-.562-.132c-.345-.084-.637-.116-1.027-.116-.819 0-1.214.125-1.642.521-.099.092-.139.109-.249.109l-.056-.002-.13-.008c-.099 0-.174.047-.234.149l-.132.223c-.018.031-.054.049-.095.049-.086 0-.131-.057-.212-.256a.64.64 0 0 0-.165-.249l-.174-.149c-.026-.023-.041-.044-.041-.06s.015-.044.041-.08a.84.84 0 0 0 .075-.115c.026-.047.039-.081.041-.1l.008-.074c.006-.024.03-.041.059-.041.055 0 .064.036.066.24a.59.59 0 0 0 .066.223l.091.198c.012.026.041.042.075.042.086 0 .194-.201.223-.413.024-.176.043-.232.079-.232s.049.021.119.166c.022.044.056.087.099.124s.075.054.099.058h.009c.068 0 .091-.043.107-.199l.008-.082-.016-.116-.011-.08c0-.053.018-.085.049-.085.018 0 .04.012.062.033l.082.083c.055.055.111.083.166.083.071 0 .115-.031.115-.081 0-.041-.009-.061-.082-.183a2.51 2.51 0 0 1-.058-.107c-.073-.139-.108-.174-.173-.174l-.084.008-.107.041c-.032.012-.054.019-.066.019s-.025-.014-.025-.033c0-.074.049-.192.141-.336.057-.09.104-.311.14-.649l.042-.388c.025-.238.051-.323.099-.323.017 0 .031.007.033.017l.024.124v.099c0 .12.013.218.028.218.035 0 .087-.041.137-.111.036-.05.063-.106.083-.165.027-.089.042-.161.042-.217 0-.191-.081-.273-.339-.345-.026-.007-.043-.016-.049-.025l-.009-.074v-.14l-.017-.1-.032-.083-.042-.091-.066.041c-.169.106-.21.138-.265.206-.075.095-.101.119-.132.119-.023 0-.041-.016-.041-.036l.033-.141.003-.024c0-.181-.102-.356-.284-.487l-.058.066-.058.083-.099.116a.39.39 0 0 0-.092.245l.001.027.001.028c0 .065-.01.088-.041.088l-.018-.008-.066-.082-.082-.099-.149-.091-.074-.058c-.08.173-.083.191-.083.414v.069c0 .074-.024.119-.064.119-.019 0-.054-.014-.101-.041-.037-.021-.082-.034-.123-.034-.12 0-.209.102-.209.241 0 .151.032.255.133.42.065.107.15.194.19.194.005 0 .015-.076.025-.177l-.025-.14-.005-.063c0-.053.019-.086.05-.086.046 0 .075.051.104.19l.066.306.066.388a1.54 1.54 0 0 0 .099.33 1.66 1.66 0 0 0 .14.289c.092.118.102.134.102.16 0 .02-.017.038-.036.038s-.043-.009-.075-.025a.32.32 0 0 0-.099-.033l-.06-.009c-.137 0-.165.11-.165.658 0 .342.071.798.159 
1.019l.174.438a.56.56 0 0 1 .042.205c0 .604-.817 1.569-1.678 1.983-.335.161-.434.328-.463.784a1.57 1.57 0 0 1 .289-.116c.244-.068.296-.103.33-.215l.033-.107c.015-.046.045-.068.215-.157.064-.033.109-.07.132-.107.043-.071.076-.106.098-.106s.036.016.036.043l-.002.013-.091.236-.01.058c0 .173.097.317.275.408l.024-.099c.029-.126.058-.209.083-.248l.158-.231c.057-.084.067-.139.067-.352l-.009-.26-.005-.066c0-.043.02-.066.058-.066.029 0 .053.011.055.025l.024.157c.006.033.04.058.081.058.08 0 .182-.102.291-.289.042-.073.059-.13.059-.197a1.9 1.9 0 0 0-.026-.257l-.003-.049c0-.041.024-.067.063-.067.021 0 .037.007.04.016l.058.191c.019.063.063.107.108.107.127 0 .258-.259.272-.537.008-.174.03-.235.083-.235.021 0 .036.008.041.02l.033.107c.012.039.036.065.059.065s.063-.037.115-.099a.83.83 0 0 0 .074-.107c.016-.029.024-.059.024-.087 0-.042-.009-.053-.074-.103s-.091-.104-.091-.173c0-.12.051-.248.198-.504.135-.232.153-.259.186-.259s.055.031.055.07c0 .024-.006.049-.017.072-.022.048-.034.081-.034.099 0 .032.029.059.066.059a.34.34 0 0 0 .15-.05 1.66 1.66 0 0 1 .223-.091l.281-.099c.052-.016.111-.025.17-.025.048 0 .096.006.144.017.094.022.158.034.19.034a.13.13 0 0 0 .133-.131.35.35 0 0 0-.051-.143l-.074-.115c-.036-.038-.051-.059-.051-.074l.009-.033c.01-.02.028-.033.047-.033.065 0 .193.133.308.323.648 1.057.962 1.391 1.438 1.535.496.15.613.255.613.551 0 .157-.083.346-.258.588-.33.458-.447.726-.447 1.031l.001.068.091-.042c.068-.034.112-.054.133-.058l.206-.041c.065-.013.117-.09.117-.173l-.001-.016-.016-.182-.002-.029c0-.102.05-.187.112-.187.023 0 .049.021.063.05l.132.28c.053.115.116.17.288.248zM17.663 3.42l-.008.132-.002.081c0 .134.021.192.071.192s.141-.102.179-.215a.84.84 0 0 0 .042-.224.41.41 0 0 0-.042-.156c-.037-.09-.075-.132-.121-.132-.071 0-.104.091-.118.322z" data-v-7c4b1471></path><path d="M17.089 1.893l.116.115c.169.169.285.513.285.846l-.004.137-.025.388-.008.325c0 1.079.314 1.815.776 1.815.116 0 .258-.107.356-.265.021-.034.049-.06.066-.06.026 0 
.039.022.041.068l.017.182.016.156.025.174.025.162c0 .083-.017.092-.215.119a.55.55 0 0 0-.231.091c-.128.082-.203.139-.215.165l-.091.19c-.012.024-.032.041-.053.041-.048 0-.113-.073-.204-.231-.084-.146-.121-.23-.121-.269 0-.026.021-.037.072-.037h.017l.099.008.074-.008.067-.008c.028-.007.049-.031.049-.061 0-.02-.013-.038-.033-.046l-.372-.158c-.089-.038-.185-.134-.272-.272-.095-.152-.151-.285-.151-.36 0-.059.025-.097.065-.097.016 0 .037.006.061.019a.31.31 0 0 0 .097.033c.014 0 .03-.006.044-.017s.025-.027.025-.04c0-.024-.025-.055-.066-.085-.309-.219-.497-.485-.497-.704 0-.064.015-.094.047-.094l.061.006c.045.009.099.017.116.017.045 0 .076-.025.076-.064 0-.024-.03-.054-.076-.076-.206-.103-.257-.162-.388-.455-.101-.224-.153-.318-.24-.429.108-.052.166-.067.257-.067.113 0 .184.074.264.273.05.126.105.211.137.211.015 0 .021-.026.021-.095a.96.96 0 0 0-.034-.248c-.02-.075-.029-.137-.024-.182l-.017-.116-.091-.05c-.084-.045-.141-.154-.141-.271a.61.61 0 0 1 .017-.15l.05-.191a1.6 1.6 0 0 0 .059-.349v-.071l.074.082zm.992 5.953a1.22 1.22 0 0 1-.396-.123.75.75 0 0 0-.31-.084c-.131 0-.231.041-.351.141l.074.057.166.116.158.124c.04.032.089.05.139.05.162 0 .299-.074.522-.281zm3.182 1.502a.47.47 0 0 1 .165-.215c.231-.198.339-.345.339-.462v-.083-.099c0-.106.017-.141.073-.141.025 0 .045.01.051.025l.041.098c.01.023.036.037.07.037.059 0 .101-.042.203-.21.043-.071.061-.117.061-.164 0-.029-.006-.061-.02-.093l-.058-.131a.4.4 0 0 1-.034-.129c0-.054.027-.086.074-.086.025 0 .047.01.06.026l.065.082c.015.018.019.029.044.029s.085-.029.106-.07l.066-.132c.017-.033.025-.059.025-.075 0-.028-.026-.056-.067-.073l-.116-.05c-.135-.058-.248-.291-.248-.512 0-.025.009-.061.025-.107l.05-.141a.41.41 0 0 0 .025-.137c0-.081-.025-.159-.082-.251l-.099-.173c-.048-.123-.076-.164-.117-.164-.029 0-.057.011-.106.039l-.083.05-.116.133-.116.057c-.069.035-.135.184-.135.309 0 .178.039.275.309.757.114.204.168.355.168.47 0 .336-.343.613-.903.727-.669.137-.815.248-.892.678.116-.06.162-.068.366-.068l.121.002h.019c.111 0 
.125-.008.196-.107a.62.62 0 0 1 .273-.23l.132-.058.107-.058.033-.009c.03 0 .046.012.046.035 0 .028-.027.076-.079.139-.08.097-.126.196-.126.272 0 .11.018.166.085.265z" data-v-7c4b1471></path><path d="M20.223 8.069c.095.059.191.145.248.223.079.108.09.116.169.116a.52.52 0 0 0 .12-.017l.074-.016c.141-.027.183-.047.183-.085 0-.023-.013-.046-.035-.064l-.099-.082-.091-.083a.44.44 0 0 0-.261-.092c-.103 0-.186.027-.309.1zm2.675.446l.049-.009.083-.008.066.008.045.003c.084 0 .127-.009.162-.036.051-.039.104-.101.157-.19.021-.035.032-.068.032-.094 0-.056-.051-.088-.141-.088-.227 0-.366.126-.454.413z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M19.816 2.81c.009.077.049.14.09.14.035 0 .09-.118.176-.263.117-.199.214-.365.214-.471 0-.035-.018-.062-.041-.062l-.009.003-.091.084a.94.94 0 0 1-.273.148c-.106.038-.136.07-.136.14l.012.067.069.143-.011.072zm-.724-.611c-.029-.024-.07-.042-.092-.042-.035 0-.05.018-.05.06l.002.024c.006.045.018.092.032.14a.72.72 0 0 0 .109.231l.125.209a.43.43 0 0 0 .18.114l.006.001c.022 0 .041-.014.043-.034l.026-.165.041-.149.005-.035c0-.04-.023-.076-.062-.096l-.141-.075-.222-.181z" data-v-7c4b1471></path><path d="M19.637 3.248h-.044l-.096-.009c-.003-.001-.037-.021-.1-.059-.037-.022-.077-.033-.116-.033-.076 0-.121.047-.121.126 0 .062.037.182.105.344l.108.264c.091.231.185.33.31.33.107 0 .193-.088.26-.263l.091-.372c.044-.116.153-.164.153-.245 0-.095-.106-.186-.196-.186l-.048.009-.124.084-.182.008z" data-v-7c4b1471></path></g><path d="M19.627 3.477h-.099c-.015 0-.03.009-.041.025-.016.023-.025.038-.025.041l.041.198.033.173c.019.095.034.133.105.133.081 0 .15-.055.159-.157l.017-.182.016-.188c0-.018-.009-.036-.024-.051s-.027-.018-.043-.018l-.007.001-.132.025z" class="C" data-v-7c4b1471></path><path d="M8.2 5.467l.033.124.066.264c.104.472.192.595.421.595.131 0 .24-.073.273-.182l.05-.166c.011-.038.04-.06.079-.06.089 0 .131.093.135.3.002.059.021.194.059.404.022.124.032.245.032.36a.59.59 0 0 1-.049.259c.028.162.05.38.05.506 0 
.204-.038.394-.099.493a1.04 1.04 0 0 1 .042.311 1.55 1.55 0 0 1-.026.275l-.044.285a.35.35 0 0 0 .019.111c.016.047.025.081.025.099 0 .023-.011.041-.025.041l-.165-.066-.182-.099c-.059-.032-.099-.064-.19-.148-.161.144-.208.166-.628.281-.084.022-.146.048-.182.074-.052.037-.09.057-.11.057-.071 0-.105-.114-.105-.35 0-.175.018-.337.05-.46a.94.94 0 0 0-.295-.074 1.68 1.68 0 0 0-.126.016l-.031.003c-.024 0-.044-.036-.044-.079 0-.032.022-.112.051-.179l.082-.198c.03-.072.098-.154.198-.239a.53.53 0 0 1 .215-.125l.297-.057c.177-.034.282-.128.282-.251s-.08-.199-.257-.261l-.356-.124a1.91 1.91 0 0 1-.323-.157c-.197-.117-.23-.132-.277-.132-.071 0-.112.054-.112.147 0 .147.096.25.232.25.028 0 .056-.006.083-.017.04-.016.069-.025.085-.025.037 0 .058.022.058.063l-.002.036-.017.14c-.014.12-.092.265-.264.496l-.074.099a1.14 1.14 0 0 1-.446.652l-.091.066c-.007.008-.029.047-.066.116-.017.032-.072.098-.165.198-.07.076-.139.139-.207.19-.092.07-.148.121-.165.149l-.083.141c-.015.025-.04.041-.067.041-.073 0-.14-.076-.182-.206-.03-.097-.057-.125-.12-.125-.032 0-.081.009-.144.025l-.116.016h-.298c-.148 0-.358.039-.454.084-.038.018-.067.026-.085.026s-.033-.011-.055-.051c-.015-.028-.018-.041-.018-.066 0-.151.108-.435.249-.661.09-.143.119-.176.223-.264-.033-.14-.123-.249-.231-.28l-.141-.041c-.019-.006-.033-.022-.033-.04 0-.043.067-.102.191-.167.161-.085.324-.132.454-.132.137 0 .27.054.422.174.224.175.284.207.393.207.177 0 .345-.162.345-.331 0-.113-.06-.234-.218-.438-.23-.298-.299-.452-.299-.671 0-.136.042-.309.093-.386l.083-.123c.017-.028.028-.055.028-.08 0-.055-.064-.095-.149-.095-.13 0-.381.093-.581.215-.099.061-.166.132-.166.176 0 .026.013.05.034.064l.091.058c.011.007.018.02.018.034s-.014.038-.034.064l-.165.215a2.76 2.76 0 0 1-.347.363.57.57 0 0 0-.083.083c-.001.001-.023.042-.066.124a1.3 1.3 0 0 1-.364.371c-.061.042-.099.072-.115.091s-.034.053-.066.115c-.021.04-.057.098-.108.173-.081.12-.148.205-.198.256L3.4 8.67c-.071.071-.133.209-.231.511-.058.183-.111.29-.142.29-.014 
0-.037-.018-.065-.051-.164-.187-.305-.298-.379-.298-.017 0-.048.014-.092.042-.061.038-.136.067-.223.083l-.305.057c-.231.044-.322.094-.455.248-.032.036-.058.057-.072.057-.025 0-.044-.041-.044-.096l.008-.077.042-.223c.044-.239.098-.416.165-.537l.14-.256c.028-.05.042-.095.042-.134 0-.19-.197-.365-.437-.387-.061-.005-.084-.02-.084-.054 0-.042.067-.125.159-.193.165-.126.382-.215.52-.215.084 0 .213.035.371.099l.182.074a.49.49 0 0 0 .184.043c.192 0 .379-.175.609-.57.104-.179.149-.308.149-.424 0-.208-.099-.336-.262-.336a.53.53 0 0 0-.357.156c-.083.084-.127.172-.127.256l.003.092.001.02c0 .077-.026.12-.072.12-.078 0-.191-.111-.516-.504-.242-.293-.296-.418-.306-.711-.28-.357-.35-.508-.35-.762l.011-.17c-.194-.233-.249-.36-.249-.574 0-.078.013-.124.059-.218-.099-.145-.13-.21-.157-.322l-.041-.174c-.028-.118-.073-.22-.132-.305-.075-.107-.083-.121-.083-.158 0-.096.066-.134.356-.205-.012-.08-.017-.142-.017-.183 0-.048.006-.094.025-.197l.058-.306c.023-.123.035-.211.035-.264s-.009-.098-.06-.263a.9.9 0 0 0-.074-.19c-.027-.042-.05-.094-.05-.113 0-.029.02-.045.057-.045.082 0 .298.131.522.316.474.392.572.549.637 1.023l.025.166c.227.167.347.369.347.585a1.03 1.03 0 0 1-.165.529l-.166.256a.5.5 0 0 0-.074.273c0 .155.045.298.157.494.127.223.235.314.368.314.218 0 .409-.379.409-.81v-.058l-.001-.149c0-.084.005-.167.018-.272l.024-.24.009-.199.003-.13c0-.046-.024-.093-.086-.158l-.181-.198c-.263-.287-.407-.556-.407-.757a.42.42 0 0 1 .422-.439.6.6 0 0 1 .175.032v-.056c0-.364.075-.633.175-.633l.04.013.149.132.181.116.05.049c0-.102.005-.113.108-.232.025-.028.063-.081.115-.157l.082-.124.042-.074c.022-.042.044-.062.068-.062s.042.03.072.12c.019.056.032.071.174.198.049.044.083.081.098.108s.036.078.067.164l.025.067a.5.5 0 0 1 .165-.149l.124-.074c.046-.028.078-.043.092-.043s.026.02.041.05.035.07.066.124c.088.153.117.227.117.299l-.002.081V1.9c.271.085.373.225.373.517 0 .21-.11.451-.323.705-.142.169-.158.202-.158.32 0 .611.426.965 1.175.976l.553.008c.356.006.569.041.9.148.393.128.507.158.621.158.038 0 
.076-.012.107-.034s.049-.049.049-.073c0-.116-.203-.209-.867-.397-1.363-.386-1.941-.934-1.941-1.836 0-.697.58-1.237 1.33-1.237a2.89 2.89 0 0 1 .826.141l.338.124c.062.023.122.035.178.035.115 0 .174-.096.177-.291.003-.104.026-.14.094-.14.095 0 .242.119.377.306s.224.401.224.542c0 .188-.088.319-.29.432-.185.282-.414.412-.72.412-.239 0-.578-.135-1.139-.454-.196-.111-.295-.148-.398-.148-.146 0-.241.084-.241.214 0 .119.092.196.334.28.357.125.445.174.586.323.421.073.604.146.868.346.725.23 1.175.724 1.175 1.291 0 .551-.373.947-.892.947a1.6 1.6 0 0 1-.391-.05z" class="B" data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M9.021 9.083a.91.91 0 0 1-.03-.199l.006-.065.025-.132.009-.107c0-.115-.019-.196-.058-.248L8.872 8.2c-.032-.044-.061-.097-.082-.158-.011-.03-.017-.054-.017-.069 0-.037.023-.061.06-.061.022 0 .031.004.106.048.032.019.06.028.081.028.049 0 .076-.08.076-.22 0-.159-.015-.248-.05-.288l-.124-.14-.107-.091c-.026-.022-.044-.05-.044-.072 0-.035.027-.06.067-.06s.117.018.183.042l.038.008c.034 0 .047-.02.047-.068l-.002-.03-.025-.174-.049-.181c-.016-.059-.025-.068-.063-.068l-.021.003-.206.041-.033.003c-.271 0-.557-.228-.644-.515l-.165-.537c-.051-.166-.175-.323-.397-.504-.067-.055-.124-.086-.165-.09l-.149-.017c-.027-.003-.045-.019-.045-.041 0-.034.032-.049.103-.049l.272.024a2.14 2.14 0 0 1 .446.116c.247.085.414.121.554.121.413 0 .661-.223.661-.597 0-.256-.134-.545-.338-.731-.117-.104-.233-.182-.273-.182-.023 0-.042.019-.042.044l.008.105v.01c0 .041-.024.073-.057.073-.042 0-.089-.054-.143-.165-.156-.325-.404-.518-.661-.518-.043 0-.074.018-.074.044 0 .017.013.057.041.119l.066.148a.28.28 0 0 1 .027.076c0 .009-.016.018-.035.018-.038 0-.098-.034-.14-.078l-.123-.132-.108-.166c-.086-.132-.155-.192-.339-.289l-.198-.107c-.038-.022-.073-.033-.103-.033s-.044.014-.044.043.011.058.032.09c.061.094.085.141.085.166s-.019.049-.043.049c-.064 0-.197-.13-.315-.305-.097-.145-.149-.307-.149-.464 0-.355.259-.619.61-.619.181 0 .41.077.621.207l.388.24a1.02 1.02 0 0 0 .452.149 1.11 1.11 0 0 
0 .242-.042c.036-.01.07-.023.099-.041.023-.014.039-.032.039-.046s-.009-.032-.023-.036l-.049-.016-.182-.034c-.047-.008-.122-.036-.223-.083l-.414-.206a1.61 1.61 0 0 0-.272-.156c-.067-.023-.109-.053-.109-.079s.013-.036.037-.036.054.011.088.032c.122.076.228.116.306.116l.355.124c.176.062.433.116.549.116.15 0 .271-.122.271-.273a.72.72 0 0 0-.143-.362c-.019-.027-.039-.042-.056-.042-.033 0-.051.024-.051.066l.008.067.013.098c0 .107-.072.191-.164.191a.44.44 0 0 1-.13-.026l-.537-.173c-.45-.145-.658-.19-.884-.19-.642.001-1.132.442-1.132 1.022 0 .657.462 1.081 1.603 1.472 1.123.385 1.281.479 1.281.768 0 .166-.168.314-.356.314a1.03 1.03 0 0 1-.198-.017l-.562-.132a4.18 4.18 0 0 0-1.028-.116c-.81 0-1.26.142-1.649.521-.097.095-.129.109-.241.109l-.056-.002-.116-.008h-.015c-.097 0-.169.045-.233.149l-.141.223c-.023.031-.058.049-.094.049-.074 0-.119-.056-.203-.256a.7.7 0 0 0-.173-.249l-.165-.149c-.027-.024-.042-.046-.042-.063s.012-.045.033-.077c.079-.115.107-.166.116-.215.022-.11.025-.115.071-.115s.066.051.07.24a.6.6 0 0 0 .067.223l.091.198c.011.026.041.042.074.042.085 0 .194-.201.223-.413.024-.177.042-.232.079-.232.015 0 .029.006.036.017s.034.061.074.148a.33.33 0 0 0 .108.124.34.34 0 0 0 .099.058l.01.001c.067 0 .09-.044.106-.2l.008-.082-.017-.116-.012-.08c0-.053.019-.085.049-.085.018 0 .04.012.062.033l.083.083c.055.054.11.082.163.082.069 0 .111-.028.111-.073 0-.04-.015-.079-.076-.191l-.058-.107c-.066-.125-.119-.174-.181-.174l-.083.008-.099.041c-.03.012-.052.019-.065.019s-.026-.014-.026-.033c0-.074.048-.191.14-.336.065-.105.11-.328.132-.649.028-.431.086-.711.146-.711.041 0 .057.035.057.127l-.004.113-.003.079c0 .077.017.139.037.139.071 0 .155-.105.222-.276.028-.072.043-.128.043-.168l-.009-.038-.165-.306-.165-.049c-.031-.009-.051-.023-.058-.037V1.9v-.09l-.016-.1-.033-.083-.041-.091c-.181.098-.267.163-.331.247s-.101.12-.133.12c-.021 0-.04-.02-.04-.044s.008-.065.025-.134l.005-.046c0-.178-.108-.352-.286-.465l-.05.066-.058.083-.099.116a.48.48 0 0 0-.083.165.32.32 0 0 
0-.017.094v.013l.001.03c0 .069-.007.086-.038.086l-.02-.009-.058-.082-.09-.099-.14-.091-.074-.058-.033.074c-.026.058-.04.102-.041.133l-.009.132-.008.14.001.024c0 .072-.017.1-.061.1-.015 0-.057-.018-.097-.041a.24.24 0 0 0-.12-.034c-.129 0-.211.089-.211.228 0 .165.049.323.133.433l.107.14c.03.037.084.07.087.049l.02-.173-.025-.14-.005-.063c0-.053.018-.086.049-.086.047 0 .075.051.105.19l.066.306.067.388a1.54 1.54 0 0 0 .099.33 1.71 1.71 0 0 0 .141.289c.092.118.102.134.102.16 0 .02-.017.038-.035.038s-.043-.009-.074-.025a.34.34 0 0 0-.099-.033l-.065-.009c-.133 0-.16.114-.16.665a3.35 3.35 0 0 0 .159 1.012l.173.438a.54.54 0 0 1 .041.205c0 .602-.799 1.547-1.677 1.983-.327.161-.431.317-.454.676l-.008.108a1.15 1.15 0 0 1 .28-.116c.246-.064.294-.095.331-.215l.032-.107c.013-.04.033-.055.223-.157.065-.034.111-.072.133-.107.041-.07.076-.107.098-.107s.032.01.032.027l-.007.029-.082.236-.01.058c0 .175.096.317.274.408.029-.191.049-.256.108-.346l.148-.231c.055-.086.078-.19.078-.353a1.53 1.53 0 0 0-.02-.258l-.006-.065c0-.042.022-.066.058-.066.029 0 .052.01.055.024l.033.157c.006.034.041.058.08.058.08 0 .183-.103.291-.289a.44.44 0 0 0 .061-.219l-.012-.112-.025-.123-.002-.024c0-.054.03-.092.073-.092.019 0 .034.007.037.016l.049.191c.017.063.061.107.107.107.056 0 .151-.096.207-.207a.73.73 0 0 0 .075-.338c0-.176.018-.227.083-.227.021 0 .037.007.041.02l.033.107c.013.039.036.065.059.065.034 0 .127-.1.189-.206.017-.027.025-.055.025-.077 0-.035-.024-.071-.075-.113-.086-.071-.091-.082-.091-.19s.017-.158.091-.289l.107-.198c.104-.204.142-.257.18-.257.032 0 .06.034.06.073 0 .022-.006.045-.017.068a.28.28 0 0 0-.033.098c0 .032.027.059.061.059s.088-.017.146-.05c.022-.013.099-.042.231-.091l.273-.099a.49.49 0 0 1 .165-.025.66.66 0 0 1 .148.017l.191.034c.075 0 .141-.064.141-.138 0-.055-.057-.163-.133-.251-.028-.033-.042-.058-.042-.075l.01-.033c.011-.02.028-.033.044-.033.056 0 .186.139.303.323.711 1.119.966 1.39 1.446 1.535.486.148.613.261.613.55 0 .158-.081.341-.258.589-.36.503-.447.706-.447 
1.038v.061l.091-.042c.067-.034.112-.054.132-.058l.206-.041c.062-.013.117-.092.117-.169l-.001-.021-.025-.182-.002-.024c0-.064.032-.138.076-.174.014-.01.028-.018.043-.018.023 0 .051.022.065.051l.132.28c.054.115.117.169.288.248zm-6.66-5.985c-.06 0-.091.052-.097.165l-.016.156-.02.2c0 .118.035.205.083.205s.135-.101.177-.216c.025-.07.037-.13.037-.193 0-.055-.013-.121-.037-.187-.03-.084-.076-.132-.126-.132zm-.685-1.205l.116.115c.189.188.282.501.282.945 0 .098-.005.163-.026.425l-.017.34c0 1.024.333 1.798.774 1.798.139 0 .229-.066.359-.264.021-.032.054-.06.075-.06s.039.021.041.068l.017.182.017.156.016.174.025.157.002.02c0 .046-.02.072-.059.079l-.149.025c-.074.012-.152.043-.231.091-.125.075-.199.132-.215.165l-.091.19c-.013.024-.032.041-.052.041-.048 0-.125-.084-.213-.231-.075-.126-.121-.228-.121-.27 0-.026.02-.036.073-.036h.016l.107.008.066-.008.075-.008c.028-.004.05-.028.05-.058 0-.022-.014-.041-.033-.049l-.371-.158a.58.58 0 0 1-.281-.272c-.102-.196-.15-.313-.15-.371 0-.051.027-.085.067-.085.014 0 .034.006.058.018a.33.33 0 0 0 .105.033c.032 0 .061-.025.061-.052s-.03-.062-.066-.089c-.31-.225-.488-.484-.488-.707 0-.062.015-.092.048-.092l.06.007.107.016h.005c.04 0 .071-.026.071-.064 0-.023-.028-.054-.068-.076l-.132-.074c-.046-.029-.085-.068-.115-.116a7.22 7.22 0 0 1-.14-.264l-.116-.223-.075-.14a.75.75 0 0 0-.05-.066.52.52 0 0 1 .257-.067c.113 0 .184.074.264.273.05.126.105.211.136.211.016 0 .021-.027.021-.097a.94.94 0 0 0-.034-.246c-.02-.075-.029-.137-.025-.182l-.016-.116-.099-.05c-.022-.01-.048-.04-.075-.082-.046-.072-.066-.133-.066-.188 0-.035.009-.092.025-.15.084-.315.108-.438.108-.54l-.001-.071.075.082z" data-v-7c4b1471></path><path d="M2.666 7.846c-.115-.017-.169-.03-.24-.058l-.157-.065c-.133-.057-.233-.084-.316-.084-.125 0-.227.042-.344.141l.074.057.165.116.157.124a.22.22 0 0 0 .136.05c.162 0 .303-.076.525-.281zm3.179 1.503c.045-.103.067-.131.165-.215.231-.198.338-.345.338-.462v-.083-.099c0-.106.018-.141.073-.141.025 0 
.044.01.051.025l.041.098c.01.023.037.038.07.038s.079-.038.121-.096l.083-.115c.041-.058.06-.107.06-.161a.27.27 0 0 0-.018-.095l-.058-.132a.39.39 0 0 1-.034-.129c0-.054.027-.086.074-.086.024 0 .046.01.059.026l.066.082c.014.018.038.029.063.029.03 0 .065-.029.086-.07l.066-.132c.017-.033.026-.059.026-.074 0-.026-.027-.052-.075-.075l-.107-.05c-.142-.065-.249-.292-.249-.529a.24.24 0 0 1 .017-.09l.058-.141a.33.33 0 0 0 .025-.132c0-.086-.028-.172-.083-.256l-.108-.173c-.049-.13-.074-.167-.114-.167l-.042.01-.058.033-.091.05-.107.133-.115.057c-.069.035-.135.182-.135.303 0 .215.024.274.307.762.11.188.168.352.168.472 0 .338-.303.579-.911.725l-.38.091c-.28.067-.445.259-.504.587.116-.06.161-.068.366-.068l.122.002h.019c.11 0 .125-.008.195-.107.082-.114.162-.182.273-.23l.132-.058.107-.058.033-.009c.03 0 .046.012.046.034 0 .032-.03.086-.078.14-.095.106-.132.194-.132.32 0 .08.019.126.091.217z" data-v-7c4b1471></path><path d="M4.805 8.068a.95.95 0 0 1 .248.223c.079.108.09.116.168.116.033 0 .074-.006.121-.017l.074-.016c.136-.026.183-.046.183-.08 0-.02-.015-.045-.042-.069l-.091-.082-.091-.083a.45.45 0 0 0-.273-.093c-.099 0-.157.02-.298.101zm2.679.447l.049-.009.075-.008.074.008.036.003c.183 0 .36-.155.36-.319 0-.057-.052-.088-.146-.088-.223 0-.362.128-.449.414z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M4.402 2.81c.009.076.049.14.089.14.035 0 .075-.091.161-.237.117-.198.229-.392.229-.497 0-.035-.017-.062-.041-.062l-.008.003-.091.083c-.066.06-.118.088-.281.148-.093.036-.127.073-.127.141l.012.066.041.116.017.099z" data-v-7c4b1471></path><path d="M3.896 2.379l.14.075c.039.02.06.047.06.08l-.01.052c-.02.07-.004.147-.005.176-.007.148-.119.146-.165.146-.027 0-.013-.017-.078-.091l-.182-.206c-.056-.063-.105-.208-.124-.371l-.001-.021c0-.045.014-.062.05-.062.022 0 .063.018.091.042l.223.181zm.323.867h-.045l-.095-.009a3.81 3.81 0 0 1-.099-.059c-.037-.022-.078-.033-.118-.033-.075 0-.123.041-.123.104s.033.181.109.367l.108.264c.094.232.186.331.307.331.108 0 
.123-.106.196-.289.165-.415.257-.476.257-.623 0-.1-.022-.127-.113-.127l-.077-.018-.132.084-.174.008z" data-v-7c4b1471></path></g><path d="M4.212 3.477h-.107c-.016 0-.032.009-.041.025l-.016.041.041.198.033.173c.018.095.053.133.124.133.081 0 .131-.055.14-.157l.016-.182.016-.188c0-.018-.009-.036-.025-.051s-.027-.018-.041-.018l-.008.001-.132.025z" class="C" data-v-7c4b1471></path><path d="M14.987 10.884c-.072-.176-.245-.3-.447-.3s-.375.124-.447.3h-.705c-.072-.176-.245-.3-.447-.3s-.375.124-.447.3h-.705c-.072-.176-.245-.3-.447-.3s-.375.124-.447.3H9.98v4.28h.81v1.641h.95v-1.64h2.399v1.64h.949v-1.64h.809v-4.28h-.911z" class="B" data-v-7c4b1471></path><g class="C" data-v-7c4b1471><path d="M10.965 15.539h.492v.632h-.492v-.632zm0 .774h.492v.2h-.492v-.2zm3.351-.774h.492v.632h-.492v-.632zm0 .774h.492v.2h-.492v-.2z" data-v-7c4b1471></path></g><g fill="#e73337" data-v-7c4b1471><path d="M10.309 11.234h5.256v.195h-5.256v-.195zm.002 3.599h5.254v-3.22h-5.256l.002 3.22z" data-v-7c4b1471></path></g><g class="C" data-v-7c4b1471><use xlink:href="#B" data-v-7c4b1471></use><path d="M12.715 11.1c0-.124.101-.225.225-.225s.225.101.225.225v.591c0 .124-.101.225-.225.225s-.225-.101-.225-.225V11.1zm1.598 0c0-.124.101-.225.225-.225s.225.101.225.225v.591c0 .124-.101.225-.225.225s-.225-.101-.225-.225V11.1zm-2.633 2.158c.339.424.766.633 1.294.633.489 0 .826-.175 1.222-.633-.395-.405-.763-.575-1.246-.575-.508 0-.966.207-1.27.575z" data-v-7c4b1471></path></g><g class="B" data-v-7c4b1471><path d="M12.938 12.873c-.364 0-.659.113-1.008.383.289.286.629.433 1.002.433.42 0 .694-.116 1.006-.425-.358-.284-.633-.391-1-.391zm-.014.483c-.195 0-.36-.052-.569-.183.174-.094.37-.142.578-.142a1.38 1.38 0 0 1 .597.142 1.03 1.03 0 0 1-.605.183zm2.079 1.191c-.157 0-.284-.127-.284-.283s.127-.284.284-.284.284.127.284.284-.127.283-.284.283z" data-v-7c4b1471></path></g><path d="M15.002 14.049c-.119 0-.216.097-.216.216s.097.215.216.215.216-.097.216-.215-.097-.216-.216-.216z" class="C" data-v-7c4b1471></path><path d="M10.887 
14.547c-.161 0-.293-.127-.293-.283s.127-.284.284-.284.284.127.284.284a.28.28 0 0 1-.275.283z" class="B" data-v-7c4b1471></path><path d="M10.876 14.049c-.119 0-.216.097-.216.216s.101.215.225.215c.114 0 .208-.097.208-.215s-.097-.216-.216-.216z" class="C" data-v-7c4b1471></path><path d="M10.877 12.593c-.156 0-.284-.127-.284-.284s.127-.283.284-.283.284.127.284.283-.128.284-.284.284z" class="B" data-v-7c4b1471></path><use xlink:href="#C" class="C" data-v-7c4b1471></use><path d="M15.003 12.593c-.157 0-.284-.127-.284-.284s.127-.283.284-.283.284.127.284.283-.127.284-.284.284z" class="B" data-v-7c4b1471></path><path d="M15.002 12.092c-.119 0-.216.097-.216.216s.097.216.216.216.216-.097.216-.216-.097-.216-.216-.216z" class="C" data-v-7c4b1471></path></g><defs data-v-7c4b1471><clipPath id="A" data-v-7c4b1471><path fill="#fff" d="M0 0h148.235v30H0z" data-v-7c4b1471></path></clipPath><path id="B" d="M11.113 11.1c0-.124.101-.225.225-.225s.225.101.225.225v.591c0 .124-.101.225-.225.225s-.225-.101-.225-.225V11.1z" data-v-7c4b1471></path><path id="C" d="M10.876 12.092c-.119 0-.216.097-.216.216s.097.216.216.216.216-.097.216-.216-.097-.216-.216-.216z" data-v-7c4b1471></path></defs></svg></a></div><div class="gh-mobile-menu-options"><div class="gh-menu-option"><button aria-expanded="false"><div class="apl-icon apl-icon--profile" style="color:;" title="Login and registration" data-v-27c0a44c><!----></div></button></div><div class="gh-menu-option"><a href="/core/shopping-cart" rel="nofollow"><div class="apl-icon apl-icon--shopping-cart" style="color:;" title="Shopping cart" data-v-27c0a44c><!----></div><!----></a></div><div class="gh-menu-option"><button aria-expanded="false" aria-controls="AdditionalMenuOptions"><div class="apl-icon apl-icon--chevron-down" style="color:;" title="Additional menu options" data-v-27c0a44c><!----></div></button></div></div></div><!----><!----></div><div class="gh-popovers gh-desktop-nav"><!----><div class="gh-popoverContainer 
gh-popoverContainer-productsAndServicesPopover" id="gh-productsAndServicesPopover"><button class="gh-popoverTrigger gh-popoverHeader" aria-expanded="false"><!--[--><div class="gh-menu-option"><div class="apl-icon apl-icon--grid" style="color:;" data-v-27c0a44c><!----></div> Products and Services </div><!--]--><svg width="12" height="7" viewbox="0 0 12 7" fill="none" xmlns="http://www.w3.org/2000/svg" class="gh-icon gh-chevron"><path d="M1.28033 0.21967C0.987436 -0.0732233 0.512564 -0.0732233 0.21967 0.21967C-0.0732231 0.512563 -0.0732231 0.987437 0.21967 1.28033L5.21967 6.28033C5.51256 6.57322 5.98744 6.57322 6.28033 6.28033L11.2803 1.28033C11.5732 0.987437 11.5732 0.512563 11.2803 0.21967C10.9874 -0.0732233 10.5126 -0.0732233 10.2197 0.21967L5.75 4.68934L1.28033 0.21967Z" class="fill-current" stroke="none"></path></svg></button><!----></div><div class="gh-profile"><div class="gh-menu-option"><a href="/core/register"><div class="apl-icon apl-icon--register" style="color:;" data-v-27c0a44c><!----></div>Register</a></div><div class="gh-menu-option"><a href="/core/login"><div class="apl-icon apl-icon--sign-in" style="color:;" data-v-27c0a44c><!----></div>Log In</a></div></div><!----><div class="gh-menu-option"><a href="/core/shopping-cart" rel="nofollow"><div class="apl-icon apl-icon--shopping-cart" style="color:;" data-v-27c0a44c><!----></div> (0) Cart</a></div></div></nav></div></div></div></div><!----><!--]--></div> </div></div><script></script> </div> <global-header id="global-header-wc" class="global-header" environment="prod" show-discovery-tool="false" register-url="/core/register?ref=/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" login-url="/core/login?ref=/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" logout-url="/core/logout?ref=/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" my-account-url="/core/my-core" basket-url="/core/shopping-cart" 
basket-items-count="0" style="display:none" ></global-header> </div> <script> /* Progressive enhancement: the legacy server-rendered header (and maintenance banner) stays visible until the "global-header" web component reports it has initialised, then the two are swapped. NOTE(review): relies on jQuery ($) being loaded earlier in the page - confirm. */ const globalHeader = $('#global-header'); const globalHeaderWc = $('#global-header-wc'); const maintenanceMessage = $('#maintenance-message'); const maintenanceMessageWc = $('#maintenance-message-wc'); /* When the web component fires its custom 'initialized' event, hide the legacy header and reveal the component (it is rendered above with display:none). */ globalHeaderWc.on('initialized', function () { globalHeader.hide(); globalHeaderWc.show(); }); /* Same swap for the maintenance-message banner. */ maintenanceMessageWc.on('initialized', function () { maintenanceMessage.hide(); maintenanceMessageWc.show(); }); </script> </header> <div class="off-canvas-wrap desktop overflow-visible"> <div class="inner-wrap"> </div> </div> <div id='platform-header'> <div class="__shared-elements-html ShEl"><div class="__shared-elements-head"> <link rel="stylesheet" href="/aca/shared-elements/_nuxt/entry.BhGMTrWu.css"> <link rel="prefetch" as="style" href="/aca/shared-elements/_nuxt/error-404.B06nACMW.css"> <link rel="prefetch" as="style" href="/aca/shared-elements/_nuxt/error-500.WGRfNq7F.css"> </div><div class="__shared-elements-body"><div id="__sharedElements-9qwv5c"><!--[--><div class="apl"><section class="apl-platform-header apl-theme--core" data-v-cabeb8f7><div tabindex="0" data-v-cabeb8f7><div class="apl-container apl-container--no-padding apl-container--full-width apl-platform-header__container--full-width" data-v-cabeb8f7><!--[--><div class="apl-container apl-platform-header__container apl-platform-header__container--mobile" data-v-5f67d08b data-v-cabeb8f7><!--[--><div class="apl-platform-header__branding" data-v-5f67d08b><!--[--><!--[--><a href="/core/" class="platform-logo" title="Cambridge Core homepage"><div><svg xmlns="http://www.w3.org/2000/svg" viewbox="0 0 209 20" fill="none" role="img" width="140" height="30"><title> Logo for Cambridge Core from Cambridge University Press. Click to return to homepage. 
</title><path d="M173.541 7.465c-4.172 0-7.648 3.521-7.648 7.747s3.476 7.746 7.648 7.746 7.648-3.521 7.648-7.746-3.337-7.747-7.648-7.747zm0 11.408c-2.086 0-3.616-1.69-3.616-3.662s1.669-3.662 3.616-3.662c2.086 0 3.615 1.69 3.615 3.662.139 2.113-1.529 3.662-3.615 3.662zm-159.357.704c-.973.563-1.947.704-3.198.704a8.31 8.31 0 0 1-3.337-.704c-.973-.423-1.808-.986-2.503-1.831-.695-.704-1.112-1.69-1.53-2.676s-.556-1.972-.556-3.099.139-2.253.556-3.38.834-1.972 1.53-2.676 1.53-1.408 2.503-1.831 2.086-.704 3.198-.704c.973 0 1.947.141 2.781.563.973.423 1.669.986 2.225 1.831l2.364-1.831c-.834-1.127-1.947-1.972-3.198-2.394-1.391-.563-2.642-.845-4.033-.845-1.669 0-3.059.282-4.45.845S4.033 2.817 3.059 3.944C2.086 4.93 1.391 6.197.834 7.465.278 8.873 0 10.282 0 11.972c0 1.268.278 2.676.695 3.944.556 1.268 1.251 2.394 2.086 3.521.973.986 2.086 1.831 3.337 2.535 1.391.563 2.781.986 4.45.986 1.252 0 2.225-.141 3.198-.422s1.669-.704 2.364-1.127 1.112-.845 1.53-1.268.695-.704.834-.986l-2.364-1.831c-.278.986-.973 1.69-1.947 2.254zm18.773.282v-7.183c0-.563-.139-1.268-.278-1.831-.278-.563-.556-1.127-1.112-1.549-.417-.423-1.112-.845-1.808-.986-.695-.282-1.669-.423-2.642-.423s-1.947.141-2.92.563c-.973.282-1.808.845-2.642 1.549l1.53 1.69c.278-.141.417-.422.695-.563s.556-.423.834-.563.695-.282 1.112-.423S26.56 10 27.116 10s1.112.141 1.53.282.695.422.973.704.417.563.556.986.139.704.139 1.127v.563h-1.947c-1.113 0-2.086.141-3.059.282s-1.808.422-2.503.845-1.251.845-1.669 1.549c-.417.563-.556 1.409-.556 2.254 0 .704.139 1.268.417 1.831s.556.986 1.113 1.268c.417.423.973.563 1.53.845.556.141 1.252.282 1.808.282 1.112 0 1.947-.141 2.781-.563s1.53-.986 2.086-1.972c0 .704 0 1.408.139 2.113h2.364c0-.423 0-.704-.139-1.127.417-.423.278-.845.278-1.408zm-2.364-3.239c0 .423 0 .986-.139 1.408s-.417.986-.695 1.268c-.278.422-.695.704-1.251.986s-1.112.422-1.808.422c-.973 0-1.669-.141-2.225-.563s-.834-.986-.834-1.69c0-.563.139-.986.556-1.408.278-.423.834-.563 1.391-.845.556-.141 1.112-.282 1.808-.423.695 0 
1.252-.141 1.947-.141h1.391v.986h-.139zM56.039 10c-.417-.563-.973-.986-1.669-1.408s-1.53-.563-2.642-.563c-.834 0-1.669.282-2.503.704s-1.53 1.127-1.947 1.972c-.417-.845-.973-1.549-1.808-1.972-.695-.423-1.53-.704-2.642-.704-.695 0-1.251.141-1.669.282-.556.141-.973.423-1.391.704s-.695.563-.973.845-.417.563-.417.704V8.31h-2.503v14.085h2.642V15.07c0-.845.139-1.549.278-2.113s.417-1.127.834-1.408c.278-.423.695-.704 1.113-.845s.834-.282 1.39-.282c.695 0 1.113.141 1.53.282.417.282.695.563.834.986s.278.845.417 1.409c0 .563.139 1.127.139 1.831v7.324h2.642v-7.746c0-1.268.278-2.253.834-3.098s1.53-1.127 2.781-1.127c.695 0 1.112.141 1.53.282.417.282.695.563.973.845.278.422.417.845.417 1.268 0 .563.139.986.139 1.549v7.887h2.642v-8.169c0-.845-.139-1.549-.278-2.254 0-.422-.278-1.127-.695-1.69zm16.826.141c-.556-.704-1.391-1.127-2.225-1.549s-1.808-.563-2.92-.563c-.973 0-1.947.282-2.781.704s-1.53.986-2.086 1.69V0h-2.642v22.394h2.642v-1.972a6.17 6.17 0 0 0 2.086 1.69c.834.422 1.808.563 2.781.563 1.112 0 2.086-.141 2.92-.563s1.669-.845 2.225-1.549 1.112-1.408 1.391-2.394c.278-.845.556-1.831.556-2.958 0-.986-.139-1.972-.556-2.958-.278-.704-.695-1.408-1.391-2.113zm-1.252 7.183c-.278.563-.556 1.127-.973 1.549s-.834.704-1.391.986-1.251.422-1.947.422-1.391-.141-1.947-.422-1.112-.563-1.391-.986c-.417-.422-.695-.986-.973-1.549s-.278-1.268-.278-1.972.139-1.408.278-1.972c.278-.563.556-1.127.973-1.549s.834-.704 1.391-.986 1.252-.423 1.947-.423 1.391.141 1.947.423 1.112.563 1.391.986c.417.423.695.986.973 1.549s.278 1.268.278 1.972c.139.704 0 1.409-.278 1.972zM82.46 8.169c-.417.141-.834.282-1.112.563-.417.282-.695.423-.973.845-.278.282-.556.704-.695.986V8.451h-2.642v14.085h2.642v-7.042c0-1.549.278-2.676.973-3.38s1.669-1.127 2.92-1.127h.695c.278 0 .417.141.695.141l.139-2.817c-.417-.141-.695-.141-1.112-.141a2.82 2.82 0 0 0-1.53 0zm5.145 14.225h2.642V8.31h-2.642v14.085zm1.252-20.986a1.77 1.77 0 0 0-1.391.563c-.278.423-.556.845-.556 1.408s.139.986.556 1.408a1.77 1.77 0 0 0 1.391.563 1.77 1.77 0 0 0 
1.39-.563c.417-.423.556-.845.556-1.408s-.139-.986-.556-1.408a1.77 1.77 0 0 0-1.39-.563zm15.574 9.014a6.17 6.17 0 0 0-2.085-1.69c-.835-.423-1.808-.704-2.781-.704-1.112 0-2.086.141-2.92.563s-1.669.845-2.225 1.549-1.112 1.409-1.391 2.394c-.278.845-.556 1.831-.556 2.958 0 .986.139 1.972.556 2.958.278.845.834 1.69 1.391 2.394s1.391 1.268 2.225 1.549c.834.422 1.808.563 2.92.563.974 0 1.807-.141 2.781-.563a6.17 6.17 0 0 0 2.085-1.69v1.972h2.643V0h-2.643v10.423zm-.417 6.901c-.278.563-.556 1.127-.973 1.549s-.834.704-1.391.986-1.251.422-1.946.422-1.391-.141-1.947-.422-1.112-.563-1.39-.986c-.417-.422-.695-.986-.973-1.549s-.278-1.268-.278-1.972.139-1.408.278-1.972c.278-.563.556-1.127.973-1.549s.834-.704 1.39-.986 1.252-.423 1.947-.423 1.39.141 1.946.423 1.113.563 1.391.986c.417.423.695.986.973 1.549s.278 1.268.278 1.972c.139.704 0 1.409-.278 1.972zm17.104-6.901c-.556-.704-1.251-1.268-2.225-1.69-.834-.423-1.808-.563-2.781-.563-1.112 0-2.086.141-2.92.563s-1.669.845-2.225 1.549-1.112 1.408-1.39 2.394c-.279.845-.557 1.831-.557 2.958 0 .986.139 1.972.557 2.817s.834 1.69 1.529 2.253c.695.704 1.391 1.127 2.225 1.549s1.808.563 2.781.563 1.947-.141 2.781-.563a5.21 5.21 0 0 0 2.086-1.831h.139v2.113c0 .563-.139 1.127-.278 1.831-.139.563-.417 1.127-.834 1.549s-.835.845-1.53 1.127-1.53.422-2.503.422-1.947-.141-2.781-.563-1.53-.986-2.225-1.69l-1.808 2.253c.974.986 2.086 1.549 3.199 1.972s2.364.563 3.615.563c1.112 0 2.225-.141 3.059-.563.974-.422 1.808-.845 2.364-1.549.695-.704 1.252-1.408 1.53-2.253.417-.845.556-1.831.556-2.958V8.732h-2.642v1.69h.278zm-1.251 8.451c-.835.845-1.947 1.268-3.338 1.268s-2.503-.422-3.337-1.268-1.252-1.972-1.252-3.38c0-.704.139-1.408.279-1.972.278-.563.556-1.127.973-1.549s.834-.704 1.39-.986 1.252-.423 1.947-.423 1.391.141 1.947.423 1.112.563 1.391.986c.417.423.695.986.973 1.549s.278 1.268.278 1.972c0 1.408-.417 2.535-1.251 3.38zm18.633-8.451c-.556-.704-1.251-1.268-2.086-1.69s-1.947-.704-3.198-.704c-1.113 0-1.947.141-2.92.563-.835.423-1.669.845-2.225 
1.549-.695.704-1.113 1.409-1.53 2.394-.417.845-.556 1.831-.556 2.958 0 .986.139 1.972.556 2.958.278.845.835 1.69 1.391 2.394s1.39 1.127 2.225 1.549 1.946.563 3.059.563c2.642 0 4.728-.986 6.257-2.817l-2.085-1.831c-.557.704-1.113 1.127-1.669 1.549s-1.391.563-2.086.563c-.556 0-1.251-.141-1.808-.282s-1.112-.422-1.529-.845-.835-.845-1.113-1.268a3.79 3.79 0 0 1-.417-1.69h11.264v-.845c0-.845-.139-1.831-.418-2.676-.139-.986-.556-1.69-1.112-2.394zm-9.734 3.803c0-.423.139-.986.278-1.408s.556-.986.835-1.268c.417-.423.834-.704 1.39-.986a3.81 3.81 0 0 1 1.808-.423c.695 0 1.251.141 1.808.282s.973.563 1.251.845c.278.422.556.845.695 1.268.139.563.278.986.278 1.69h-8.343zm30.592 3.944c-.695.422-1.529.563-2.503.563-.834 0-1.529-.141-2.364-.563-.695-.282-1.39-.845-1.807-1.408-.557-.563-.974-1.268-1.252-2.113a8.3 8.3 0 0 1-.417-2.676 8.3 8.3 0 0 1 .417-2.676c.278-.845.695-1.549 1.252-2.113s1.112-1.127 1.946-1.408c.696-.282 1.53-.563 2.364-.563s1.669.141 2.225.423c.695.282 1.252.704 1.669 1.268l3.476-2.958c-.417-.563-.973-1.127-1.529-1.408-.557-.423-1.113-.704-1.808-.986-.556-.282-1.252-.423-1.947-.563s-1.251-.141-1.808-.141c-1.668 0-3.198.282-4.449.704a12.06 12.06 0 0 0-3.616 2.254c-.973.986-1.807 2.113-2.364 3.521s-.834 2.958-.834 4.507c0 1.69.278 3.239.834 4.507.557 1.409 1.391 2.535 2.364 3.521s2.225 1.69 3.616 2.253 2.92.704 4.449.704c1.391 0 2.782-.282 4.172-.845s2.503-1.549 3.337-2.817l-3.893-2.817c-.139.845-.834 1.409-1.53 1.831zm32.539-10.563c-.973 0-1.808.282-2.503.704s-1.251 1.127-1.668 1.972V8.028h-4.45v14.366h4.45v-6.056c0-.704 0-1.408.139-1.972s.278-1.127.556-1.549.695-.704 1.112-.986c.556-.282 1.113-.422 1.947-.422.278 0 .695 0 .973.141.279 0 .557.141.974.282V7.746c-.278 0-.417-.141-.695-.141h-.835zm16.965 7.606c0-1.127-.139-2.254-.417-3.099a5.18 5.18 0 0 0-1.391-2.394c-.556-.704-1.251-1.127-2.086-1.549s-1.807-.563-2.781-.563c-1.112 0-2.086.141-3.059.563s-1.808.845-2.503 1.549-1.251 1.408-1.669 2.394-.556 1.972-.556 3.099.139 2.254.556 3.099c.418.986.974 1.69 1.669 
2.394s1.53 1.127 2.503 1.549 1.947.563 3.059.563 2.225-.282 3.338-.704A8.03 8.03 0 0 0 208.166 20l-3.06-2.253c-.417.563-.834.986-1.251 1.268-.556.282-1.112.422-1.808.422-.834 0-1.529-.282-2.225-.704-.556-.563-.973-1.127-1.112-1.972H209v-1.549h-.139zm-10.29-1.549c0-.423.139-.704.278-.986s.417-.563.556-.845c.278-.282.556-.422.974-.563s.834-.282 1.251-.282c.834 0 1.53.282 1.947.845.556.563.695 1.127.695 1.972h-5.701v-.141z" fill="#fff"></path></svg></div></a><!--]--><!--]--></div><div class="apl-container apl-container--no-padding apl-platform-header__navigation-mobile-container" data-v-5f67d08b><!--[--><div data-v-5f67d08b><button tabindex="0" type="button" class="apl-button apl-button--secondary apl-button--md apl-button--icon-only apl-platform-header__button apl-platform-header__button--icon" aria-label="Log in by institution"><!----><!--[--><!----><!--]--><div class="apl-icon apl-icon--institution apl-button__icon--center" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!----></button></div><div class="apl-search apl-search--collapse apl-search--md apl-platform-header__search" data-v-aec9f5e2 data-v-5f67d08b><input class="apl-search__input" type="search" placeholder="Search..." 
autocomplete="off" value data-v-aec9f5e2><button tabindex="0" type="button" class="apl-button apl-button--primary apl-button--md apl-button--icon-only apl-search__btn-search apl-search__btn-search--mobile" aria-label="Search" data-v-3dc43ba0 data-v-aec9f5e2><!----><!--[--><!----><!--]--><div class="apl-icon apl-icon--search apl-button__icon--center" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!----></button><button tabindex="0" type="button" class="apl-button apl-button--primary apl-button--md apl-search__btn-search apl-search__btn-search--desktop" aria-label="Search" data-v-28a98aec data-v-aec9f5e2><div class="apl-icon apl-icon--search apl-button__icon--left" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!--[--><span>Search</span><!--]--><!----><!----></button><!----></div><button tabindex="0" type="button" class="apl-button apl-button--secondary apl-button--md apl-button--icon-only apl-platform-header__button apl-platform-header__button--icon apl-menu__options--mobile" aria-label="Expand menu options" aria-expanded="false" id="apl-menu__mobile-menu-toggle" data-v-b4a8da84 data-v-5f67d08b><!----><!--[--><!----><!--]--><div class="apl-icon apl-icon--menu apl-button__icon--center" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!----></button><!--]--></div><!--]--></div><div class="apl-container apl-platform-header__container apl-platform-header__container--desktop" data-v-b46fab2c data-v-cabeb8f7><!--[--><div class="apl-container apl-container--no-padding apl-platform-header__logo-and-menu-container" data-v-b46fab2c><!--[--><div class="apl-platform-header__branding" data-v-b46fab2c><!--[--><!--[--><a href="/core/" class="platform-logo" title="Cambridge Core homepage"><div><svg xmlns="http://www.w3.org/2000/svg" viewbox="0 0 209 20" fill="none" role="img" width="140" height="30"><title> Logo for Cambridge Core from Cambridge University Press. Click to return to homepage. 
</title><path d="M173.541 7.465c-4.172 0-7.648 3.521-7.648 7.747s3.476 7.746 7.648 7.746 7.648-3.521 7.648-7.746-3.337-7.747-7.648-7.747zm0 11.408c-2.086 0-3.616-1.69-3.616-3.662s1.669-3.662 3.616-3.662c2.086 0 3.615 1.69 3.615 3.662.139 2.113-1.529 3.662-3.615 3.662zm-159.357.704c-.973.563-1.947.704-3.198.704a8.31 8.31 0 0 1-3.337-.704c-.973-.423-1.808-.986-2.503-1.831-.695-.704-1.112-1.69-1.53-2.676s-.556-1.972-.556-3.099.139-2.253.556-3.38.834-1.972 1.53-2.676 1.53-1.408 2.503-1.831 2.086-.704 3.198-.704c.973 0 1.947.141 2.781.563.973.423 1.669.986 2.225 1.831l2.364-1.831c-.834-1.127-1.947-1.972-3.198-2.394-1.391-.563-2.642-.845-4.033-.845-1.669 0-3.059.282-4.45.845S4.033 2.817 3.059 3.944C2.086 4.93 1.391 6.197.834 7.465.278 8.873 0 10.282 0 11.972c0 1.268.278 2.676.695 3.944.556 1.268 1.251 2.394 2.086 3.521.973.986 2.086 1.831 3.337 2.535 1.391.563 2.781.986 4.45.986 1.252 0 2.225-.141 3.198-.422s1.669-.704 2.364-1.127 1.112-.845 1.53-1.268.695-.704.834-.986l-2.364-1.831c-.278.986-.973 1.69-1.947 2.254zm18.773.282v-7.183c0-.563-.139-1.268-.278-1.831-.278-.563-.556-1.127-1.112-1.549-.417-.423-1.112-.845-1.808-.986-.695-.282-1.669-.423-2.642-.423s-1.947.141-2.92.563c-.973.282-1.808.845-2.642 1.549l1.53 1.69c.278-.141.417-.422.695-.563s.556-.423.834-.563.695-.282 1.112-.423S26.56 10 27.116 10s1.112.141 1.53.282.695.422.973.704.417.563.556.986.139.704.139 1.127v.563h-1.947c-1.113 0-2.086.141-3.059.282s-1.808.422-2.503.845-1.251.845-1.669 1.549c-.417.563-.556 1.409-.556 2.254 0 .704.139 1.268.417 1.831s.556.986 1.113 1.268c.417.423.973.563 1.53.845.556.141 1.252.282 1.808.282 1.112 0 1.947-.141 2.781-.563s1.53-.986 2.086-1.972c0 .704 0 1.408.139 2.113h2.364c0-.423 0-.704-.139-1.127.417-.423.278-.845.278-1.408zm-2.364-3.239c0 .423 0 .986-.139 1.408s-.417.986-.695 1.268c-.278.422-.695.704-1.251.986s-1.112.422-1.808.422c-.973 0-1.669-.141-2.225-.563s-.834-.986-.834-1.69c0-.563.139-.986.556-1.408.278-.423.834-.563 1.391-.845.556-.141 1.112-.282 1.808-.423.695 0 
1.252-.141 1.947-.141h1.391v.986h-.139zM56.039 10c-.417-.563-.973-.986-1.669-1.408s-1.53-.563-2.642-.563c-.834 0-1.669.282-2.503.704s-1.53 1.127-1.947 1.972c-.417-.845-.973-1.549-1.808-1.972-.695-.423-1.53-.704-2.642-.704-.695 0-1.251.141-1.669.282-.556.141-.973.423-1.391.704s-.695.563-.973.845-.417.563-.417.704V8.31h-2.503v14.085h2.642V15.07c0-.845.139-1.549.278-2.113s.417-1.127.834-1.408c.278-.423.695-.704 1.113-.845s.834-.282 1.39-.282c.695 0 1.113.141 1.53.282.417.282.695.563.834.986s.278.845.417 1.409c0 .563.139 1.127.139 1.831v7.324h2.642v-7.746c0-1.268.278-2.253.834-3.098s1.53-1.127 2.781-1.127c.695 0 1.112.141 1.53.282.417.282.695.563.973.845.278.422.417.845.417 1.268 0 .563.139.986.139 1.549v7.887h2.642v-8.169c0-.845-.139-1.549-.278-2.254 0-.422-.278-1.127-.695-1.69zm16.826.141c-.556-.704-1.391-1.127-2.225-1.549s-1.808-.563-2.92-.563c-.973 0-1.947.282-2.781.704s-1.53.986-2.086 1.69V0h-2.642v22.394h2.642v-1.972a6.17 6.17 0 0 0 2.086 1.69c.834.422 1.808.563 2.781.563 1.112 0 2.086-.141 2.92-.563s1.669-.845 2.225-1.549 1.112-1.408 1.391-2.394c.278-.845.556-1.831.556-2.958 0-.986-.139-1.972-.556-2.958-.278-.704-.695-1.408-1.391-2.113zm-1.252 7.183c-.278.563-.556 1.127-.973 1.549s-.834.704-1.391.986-1.251.422-1.947.422-1.391-.141-1.947-.422-1.112-.563-1.391-.986c-.417-.422-.695-.986-.973-1.549s-.278-1.268-.278-1.972.139-1.408.278-1.972c.278-.563.556-1.127.973-1.549s.834-.704 1.391-.986 1.252-.423 1.947-.423 1.391.141 1.947.423 1.112.563 1.391.986c.417.423.695.986.973 1.549s.278 1.268.278 1.972c.139.704 0 1.409-.278 1.972zM82.46 8.169c-.417.141-.834.282-1.112.563-.417.282-.695.423-.973.845-.278.282-.556.704-.695.986V8.451h-2.642v14.085h2.642v-7.042c0-1.549.278-2.676.973-3.38s1.669-1.127 2.92-1.127h.695c.278 0 .417.141.695.141l.139-2.817c-.417-.141-.695-.141-1.112-.141a2.82 2.82 0 0 0-1.53 0zm5.145 14.225h2.642V8.31h-2.642v14.085zm1.252-20.986a1.77 1.77 0 0 0-1.391.563c-.278.423-.556.845-.556 1.408s.139.986.556 1.408a1.77 1.77 0 0 0 1.391.563 1.77 1.77 0 0 0 
1.39-.563c.417-.423.556-.845.556-1.408s-.139-.986-.556-1.408a1.77 1.77 0 0 0-1.39-.563zm15.574 9.014a6.17 6.17 0 0 0-2.085-1.69c-.835-.423-1.808-.704-2.781-.704-1.112 0-2.086.141-2.92.563s-1.669.845-2.225 1.549-1.112 1.409-1.391 2.394c-.278.845-.556 1.831-.556 2.958 0 .986.139 1.972.556 2.958.278.845.834 1.69 1.391 2.394s1.391 1.268 2.225 1.549c.834.422 1.808.563 2.92.563.974 0 1.807-.141 2.781-.563a6.17 6.17 0 0 0 2.085-1.69v1.972h2.643V0h-2.643v10.423zm-.417 6.901c-.278.563-.556 1.127-.973 1.549s-.834.704-1.391.986-1.251.422-1.946.422-1.391-.141-1.947-.422-1.112-.563-1.39-.986c-.417-.422-.695-.986-.973-1.549s-.278-1.268-.278-1.972.139-1.408.278-1.972c.278-.563.556-1.127.973-1.549s.834-.704 1.39-.986 1.252-.423 1.947-.423 1.39.141 1.946.423 1.113.563 1.391.986c.417.423.695.986.973 1.549s.278 1.268.278 1.972c.139.704 0 1.409-.278 1.972zm17.104-6.901c-.556-.704-1.251-1.268-2.225-1.69-.834-.423-1.808-.563-2.781-.563-1.112 0-2.086.141-2.92.563s-1.669.845-2.225 1.549-1.112 1.408-1.39 2.394c-.279.845-.557 1.831-.557 2.958 0 .986.139 1.972.557 2.817s.834 1.69 1.529 2.253c.695.704 1.391 1.127 2.225 1.549s1.808.563 2.781.563 1.947-.141 2.781-.563a5.21 5.21 0 0 0 2.086-1.831h.139v2.113c0 .563-.139 1.127-.278 1.831-.139.563-.417 1.127-.834 1.549s-.835.845-1.53 1.127-1.53.422-2.503.422-1.947-.141-2.781-.563-1.53-.986-2.225-1.69l-1.808 2.253c.974.986 2.086 1.549 3.199 1.972s2.364.563 3.615.563c1.112 0 2.225-.141 3.059-.563.974-.422 1.808-.845 2.364-1.549.695-.704 1.252-1.408 1.53-2.253.417-.845.556-1.831.556-2.958V8.732h-2.642v1.69h.278zm-1.251 8.451c-.835.845-1.947 1.268-3.338 1.268s-2.503-.422-3.337-1.268-1.252-1.972-1.252-3.38c0-.704.139-1.408.279-1.972.278-.563.556-1.127.973-1.549s.834-.704 1.39-.986 1.252-.423 1.947-.423 1.391.141 1.947.423 1.112.563 1.391.986c.417.423.695.986.973 1.549s.278 1.268.278 1.972c0 1.408-.417 2.535-1.251 3.38zm18.633-8.451c-.556-.704-1.251-1.268-2.086-1.69s-1.947-.704-3.198-.704c-1.113 0-1.947.141-2.92.563-.835.423-1.669.845-2.225 
1.549-.695.704-1.113 1.409-1.53 2.394-.417.845-.556 1.831-.556 2.958 0 .986.139 1.972.556 2.958.278.845.835 1.69 1.391 2.394s1.39 1.127 2.225 1.549 1.946.563 3.059.563c2.642 0 4.728-.986 6.257-2.817l-2.085-1.831c-.557.704-1.113 1.127-1.669 1.549s-1.391.563-2.086.563c-.556 0-1.251-.141-1.808-.282s-1.112-.422-1.529-.845-.835-.845-1.113-1.268a3.79 3.79 0 0 1-.417-1.69h11.264v-.845c0-.845-.139-1.831-.418-2.676-.139-.986-.556-1.69-1.112-2.394zm-9.734 3.803c0-.423.139-.986.278-1.408s.556-.986.835-1.268c.417-.423.834-.704 1.39-.986a3.81 3.81 0 0 1 1.808-.423c.695 0 1.251.141 1.808.282s.973.563 1.251.845c.278.422.556.845.695 1.268.139.563.278.986.278 1.69h-8.343zm30.592 3.944c-.695.422-1.529.563-2.503.563-.834 0-1.529-.141-2.364-.563-.695-.282-1.39-.845-1.807-1.408-.557-.563-.974-1.268-1.252-2.113a8.3 8.3 0 0 1-.417-2.676 8.3 8.3 0 0 1 .417-2.676c.278-.845.695-1.549 1.252-2.113s1.112-1.127 1.946-1.408c.696-.282 1.53-.563 2.364-.563s1.669.141 2.225.423c.695.282 1.252.704 1.669 1.268l3.476-2.958c-.417-.563-.973-1.127-1.529-1.408-.557-.423-1.113-.704-1.808-.986-.556-.282-1.252-.423-1.947-.563s-1.251-.141-1.808-.141c-1.668 0-3.198.282-4.449.704a12.06 12.06 0 0 0-3.616 2.254c-.973.986-1.807 2.113-2.364 3.521s-.834 2.958-.834 4.507c0 1.69.278 3.239.834 4.507.557 1.409 1.391 2.535 2.364 3.521s2.225 1.69 3.616 2.253 2.92.704 4.449.704c1.391 0 2.782-.282 4.172-.845s2.503-1.549 3.337-2.817l-3.893-2.817c-.139.845-.834 1.409-1.53 1.831zm32.539-10.563c-.973 0-1.808.282-2.503.704s-1.251 1.127-1.668 1.972V8.028h-4.45v14.366h4.45v-6.056c0-.704 0-1.408.139-1.972s.278-1.127.556-1.549.695-.704 1.112-.986c.556-.282 1.113-.422 1.947-.422.278 0 .695 0 .973.141.279 0 .557.141.974.282V7.746c-.278 0-.417-.141-.695-.141h-.835zm16.965 7.606c0-1.127-.139-2.254-.417-3.099a5.18 5.18 0 0 0-1.391-2.394c-.556-.704-1.251-1.127-2.086-1.549s-1.807-.563-2.781-.563c-1.112 0-2.086.141-3.059.563s-1.808.845-2.503 1.549-1.251 1.408-1.669 2.394-.556 1.972-.556 3.099.139 2.254.556 3.099c.418.986.974 1.69 1.669 
2.394s1.53 1.127 2.503 1.549 1.947.563 3.059.563 2.225-.282 3.338-.704A8.03 8.03 0 0 0 208.166 20l-3.06-2.253c-.417.563-.834.986-1.251 1.268-.556.282-1.112.422-1.808.422-.834 0-1.529-.282-2.225-.704-.556-.563-.973-1.127-1.112-1.972H209v-1.549h-.139zm-10.29-1.549c0-.423.139-.704.278-.986s.417-.563.556-.845c.278-.282.556-.422.974-.563s.834-.282 1.251-.282c.834 0 1.53.282 1.947.845.556.563.695 1.127.695 1.972h-5.701v-.141z" fill="#fff"></path></svg></div></a><!--]--><!--]--></div><hr class="apl-platform-header__logo-and-menu-divider" data-v-b46fab2c><nav data-v-b46fab2c><div class="apl-container apl-container--no-padding apl-menu__options--desktop" data-v-1e1e1426 data-v-b46fab2c><!--[--><ul class="apl-menu__list" data-v-1e1e1426><!--[--><li id="menu_item-0" class="apl-menu__list-item" data-v-1e1e1426><button tabindex="0" type="button" class="apl-button apl-button--secondary apl-button--md apl-button--text apl-platform-header__button apl-menu__item"><!----><!--[--><span>Browse</span><!--]--><!----><div class="apl-icon apl-icon--chevron-down apl-button__icon--right" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div></button></li><li id="menu_item-1" class="apl-menu__list-item" data-v-1e1e1426><button tabindex="0" type="button" class="apl-button apl-button--secondary apl-button--md apl-button--text apl-platform-header__button apl-menu__item"><!----><!--[--><span>Services</span><!--]--><!----><div class="apl-icon apl-icon--chevron-down apl-button__icon--right" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div></button></li><li id="menu_item-2" class="apl-menu__list-item" data-v-1e1e1426><button tabindex="0" type="button" class="apl-button apl-button--secondary apl-button--md apl-button--text apl-platform-header__button apl-menu__item"><!----><!--[--><span>Open research</span><!--]--><!----><div class="apl-icon apl-icon--chevron-down apl-button__icon--right" style="color:;" tabindex="-1" 
data-v-27c0a44c><!----></div></button></li><!--]--></ul><!--]--></div></nav><!--]--></div><div class="apl-container apl-container--no-padding apl-platform-header__account-and-search-container" data-v-b46fab2c><!--[--><div class="apl-platform-header__authenticated-by" data-v-b46fab2c><button tabindex="0" type="button" class="apl-button apl-button--secondary apl-button--md apl-button--text apl-platform-header__button"><!----><!--[--><span>Institution Login</span><!--]--><!----><!----></button></div><div class="apl-search apl-search--collapse apl-search--md apl-platform-header__search" data-v-aec9f5e2 data-v-b46fab2c><input class="apl-search__input" type="search" placeholder="Search..." autocomplete="off" value data-v-aec9f5e2><button tabindex="0" type="button" class="apl-button apl-button--primary apl-button--md apl-button--icon-only apl-search__btn-search apl-search__btn-search--mobile" aria-label="Search" data-v-3dc43ba0 data-v-aec9f5e2><!----><!--[--><!----><!--]--><div class="apl-icon apl-icon--search apl-button__icon--center" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!----></button><button tabindex="0" type="button" class="apl-button apl-button--primary apl-button--md apl-search__btn-search apl-search__btn-search--desktop" aria-label="Search" data-v-28a98aec data-v-aec9f5e2><div class="apl-icon apl-icon--search apl-button__icon--left" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!--[--><span>Search</span><!--]--><!----><!----></button><!----></div><!--]--></div><!--]--></div><!--]--></div><!----><!----></div><!----></section></div><!----><!----><!----><!--]--></div> </div></div><script>window.__PLATFORM_HEADER_DATA__ = {"megaMenuData":{"menuItems":[{"id":"menu_item-0","label":"Browse","openExternal":null,"url":"","isMegaMenu":true,"menuCats":[{"id":"menu_item-0_cat-0","label":"Subjects","openExternal":null,"url":"/core/browse-subjects","menuTopics":[{"id":"menu_item-0_cat-0_subCat-0","label":" Subjects 
(A-D)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-0_subCat-0_link-0","label":"Anthropology","openExternal":false,"url":"/core/browse-subjects/anthropology"},{"id":"menu_item-0_cat-0_subCat-0_link-1","label":"Archaeology","openExternal":false,"url":"/core/browse-subjects/archaeology"},{"id":"menu_item-0_cat-0_subCat-0_link-2","label":"Area Studies","openExternal":false,"url":"/core/browse-subjects/area-studies"},{"id":"menu_item-0_cat-0_subCat-0_link-3","label":"Art","openExternal":false,"url":"/core/browse-subjects/art"},{"id":"menu_item-0_cat-0_subCat-0_link-4","label":"Chemistry","openExternal":false,"url":"/core/browse-subjects/chemistry"},{"id":"menu_item-0_cat-0_subCat-0_link-5","label":"Classical Studies","openExternal":false,"url":"/core/browse-subjects/classical-studies"},{"id":"menu_item-0_cat-0_subCat-0_link-6","label":"Computer Science","openExternal":false,"url":"/core/browse-subjects/computer-science"},{"id":"menu_item-0_cat-0_subCat-0_link-7","label":"Drama, Theatre, Performance Studies","openExternal":false,"url":"/core/browse-subjects/drama-and-theatre"}]},{"id":"menu_item-0_cat-0_subCat-1","label":" Subjects (E-K)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-0_subCat-1_link-0","label":"Earth and Environmental Science","openExternal":false,"url":"/core/browse-subjects/earth-and-environmental-sciences"},{"id":"menu_item-0_cat-0_subCat-1_link-1","label":"Economics","openExternal":false,"url":"/core/browse-subjects/economics"},{"id":"menu_item-0_cat-0_subCat-1_link-2","label":"Education","openExternal":false,"url":"/core/browse-subjects/education"},{"id":"menu_item-0_cat-0_subCat-1_link-3","label":"Engineering","openExternal":false,"url":"/core/browse-subjects/engineering"},{"id":"menu_item-0_cat-0_subCat-1_link-4","label":"English Language Teaching – Resources for 
Teachers","openExternal":false,"url":"/core/browse-subjects/english-language-teaching-resources-for-teachers"},{"id":"menu_item-0_cat-0_subCat-1_link-5","label":"Film, Media, Mass Communication","openExternal":false,"url":"/core/browse-subjects/film-media-mass-ommunication"},{"id":"menu_item-0_cat-0_subCat-1_link-6","label":"General Science","openExternal":false,"url":"/core/browse-subjects/general-science"},{"id":"menu_item-0_cat-0_subCat-1_link-7","label":"Geography","openExternal":false,"url":"/core/browse-subjects/geography"},{"id":"menu_item-0_cat-0_subCat-1_link-8","label":"History","openExternal":false,"url":"/core/browse-subjects/history"}]},{"id":"menu_item-0_cat-0_subCat-2","label":" Subjects (L-O)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-0_subCat-2_link-0","label":"Language and Linguistics","openExternal":false,"url":"/core/browse-subjects/language-and-linguistics"},{"id":"menu_item-0_cat-0_subCat-2_link-1","label":"Law","openExternal":false,"url":"/core/browse-subjects/law"},{"id":"menu_item-0_cat-0_subCat-2_link-2","label":"Life Sciences","openExternal":false,"url":"/core/browse-subjects/life-sciences"},{"id":"menu_item-0_cat-0_subCat-2_link-3","label":"Literature","openExternal":false,"url":"/core/browse-subjects/literature"},{"id":"menu_item-0_cat-0_subCat-2_link-4","label":"Management","openExternal":false,"url":"/core/browse-subjects/management"},{"id":"menu_item-0_cat-0_subCat-2_link-5","label":"Materials 
Science","openExternal":false,"url":"/core/browse-subjects/materials-science"},{"id":"menu_item-0_cat-0_subCat-2_link-6","label":"Mathematics","openExternal":false,"url":"/core/browse-subjects/mathematics"},{"id":"menu_item-0_cat-0_subCat-2_link-7","label":"Medicine","openExternal":false,"url":"/core/browse-subjects/medicine"},{"id":"menu_item-0_cat-0_subCat-2_link-8","label":"Music","openExternal":false,"url":"/core/browse-subjects/music"},{"id":"menu_item-0_cat-0_subCat-2_link-9","label":"Nutrition","openExternal":false,"url":"/core/browse-subjects/nutrition"}]},{"id":"menu_item-0_cat-0_subCat-3","label":" Subjects (P-Z)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-0_subCat-3_link-0","label":"Philosophy","openExternal":false,"url":"/core/browse-subjects/philosophy"},{"id":"menu_item-0_cat-0_subCat-3_link-1","label":"Physics and Astronomy","openExternal":false,"url":"/core/browse-subjects/physics"},{"id":"menu_item-0_cat-0_subCat-3_link-2","label":"Politics and International Relations","openExternal":false,"url":"/core/browse-subjects/politics-and-international-relations"},{"id":"menu_item-0_cat-0_subCat-3_link-3","label":"Psychiatry","openExternal":false,"url":"/core/browse-subjects/psychiatry"},{"id":"menu_item-0_cat-0_subCat-3_link-4","label":"Psychology","openExternal":false,"url":"/core/browse-subjects/psychology"},{"id":"menu_item-0_cat-0_subCat-3_link-5","label":"Religion","openExternal":false,"url":"/core/browse-subjects/religion"},{"id":"menu_item-0_cat-0_subCat-3_link-6","label":"Social Science Research Methods","openExternal":false,"url":"/core/browse-subjects/social-science-research-methods"},{"id":"menu_item-0_cat-0_subCat-3_link-7","label":"Sociology","openExternal":false,"url":"/core/browse-subjects/sociology"},{"id":"menu_item-0_cat-0_subCat-3_link-8","label":"Statistics and Probability","openExternal":false,"url":"/core/browse-subjects/statistics-and-probability"}]}]},{"id":"menu_item-0_cat-1","label":"Open 
access","openExternal":null,"url":"/core/publications/open-access","menuTopics":[{"id":"menu_item-0_cat-1_subCat-0","label":"All open access publishing","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-1_subCat-0_link-0","label":"Open access","openExternal":false,"url":"/core/publications/open-access"},{"id":"menu_item-0_cat-1_subCat-0_link-1","label":"Open access journals","openExternal":false,"url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc"},{"id":"menu_item-0_cat-1_subCat-0_link-2","label":"Research open journals","openExternal":false,"url":"/core/publications/open-access/research-open?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc"},{"id":"menu_item-0_cat-1_subCat-0_link-3","label":"Journals containing open access","openExternal":false,"url":"/core/publications/open-access/hybrid-open-access-journals?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc"},{"id":"menu_item-0_cat-1_subCat-0_link-4","label":"Open access articles","openExternal":false,"url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL_ARTICLE"},{"id":"menu_item-0_cat-1_subCat-0_link-5","label":"Open access books","openExternal":false,"url":"/core/publications/open-access/listing?aggs[productTypes][filters]=BOOK&sort=canonical.date:desc"},{"id":"menu_item-0_cat-1_subCat-0_link-6","label":"Open access Elements","openExternal":false,"url":"/core/publications/elements/published-elements?aggs%5BopenAccess%5D%5Bfilters%5D=7275BA1E84CA769210167A6A66523B47&aggs%5BproductTypes%5D%5Bfilters%5D=ELEMENT&searchWithinIds=ECFD8F5C64F47F3F5A3D395C15B7C493"}]}]},{"id":"menu_item-0_cat-2","label":"Journals","openExternal":null,"url":"/core/publications/journals","menuTopics":[{"id":"menu_item-0_cat-2_subCat-0","label":"Explore","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-2_subCat-0_link-0","label":"All journal 
subjects","openExternal":false,"url":"/core/publications/journals"},{"id":"menu_item-0_cat-2_subCat-0_link-1","label":"Search journals","openExternal":false,"url":"/core/publications/journals"}]},{"id":"menu_item-0_cat-2_subCat-1","label":"Open access","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-2_subCat-1_link-0","label":"Open access journals","openExternal":false,"url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc"},{"id":"menu_item-0_cat-2_subCat-1_link-1","label":"Research open journals","openExternal":false,"url":"/core/publications/open-access/research-open?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc"},{"id":"menu_item-0_cat-2_subCat-1_link-2","label":"Journals containing open access","openExternal":false,"url":"/core/publications/open-access/hybrid-open-access-journals?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc"},{"id":"menu_item-0_cat-2_subCat-1_link-3","label":"Open access articles","openExternal":false,"url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL_ARTICLE"}]},{"id":"menu_item-0_cat-2_subCat-2","label":"Collections","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-2_subCat-2_link-0","label":"Cambridge Forum","openExternal":false,"url":"/core/publications/collections/cambridge-forum"},{"id":"menu_item-0_cat-2_subCat-2_link-1","label":"Cambridge Law Reports Collection","openExternal":false,"url":"/core/publications/collections/cambridge-law-reports-collection"},{"id":"menu_item-0_cat-2_subCat-2_link-2","label":"Cambridge Prisms","openExternal":false,"url":"/core/publications/collections/cambridge-prisms"},{"id":"menu_item-0_cat-2_subCat-2_link-3","label":"Research 
Directions","openExternal":false,"url":"/core/publications/collections/research-directions"}]}]},{"id":"menu_item-0_cat-3","label":"Books","openExternal":null,"url":"/core/publications/books","menuTopics":[{"id":"menu_item-0_cat-3_subCat-0","label":"Explore","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-3_subCat-0_link-0","label":"Books","openExternal":false,"url":"/core/publications/books"},{"id":"menu_item-0_cat-3_subCat-0_link-1","label":"Open access books","openExternal":false,"url":"/core/publications/open-access/listing?aggs[productTypes][filters]=BOOK&sort=canonical.date:desc"},{"id":"menu_item-0_cat-3_subCat-0_link-2","label":"New books","openExternal":false,"url":"/core/publications/books/listing?aggs[productDate][filters]=Last+3+months&aggs[productTypes][filters]=BOOK&sort=canonical.date:desc"},{"id":"menu_item-0_cat-3_subCat-0_link-3","label":"Flip it Open","openExternal":false,"url":"/core/publications/collections/flip-it-open"}]},{"id":"menu_item-0_cat-3_subCat-1","label":"Collections","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-3_subCat-1_link-0","label":"Cambridge Companions","openExternal":false,"url":"/core/publications/collections/cambridge-companions"},{"id":"menu_item-0_cat-3_subCat-1_link-1","label":"Cambridge Editions","openExternal":false,"url":"/core/publications/collections/cambridge-editions"},{"id":"menu_item-0_cat-3_subCat-1_link-2","label":"Cambridge Histories","openExternal":false,"url":"/core/publications/collections/cambridge-histories"},{"id":"menu_item-0_cat-3_subCat-1_link-3","label":"Cambridge Library Collection","openExternal":false,"url":"/core/publications/collections/cambridge-library-collection"},{"id":"menu_item-0_cat-3_subCat-1_link-4","label":"Cambridge Shakespeare","openExternal":false,"url":"/core/publications/collections/cambridge-shakespeare"},{"id":"menu_item-0_cat-3_subCat-1_link-5","label":"Cambridge 
Handbooks","openExternal":false,"url":"/core/publications/collections/cambridgehandbooks"}]},{"id":"menu_item-0_cat-3_subCat-2","label":" Collections (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-3_subCat-2_link-0","label":"Dispute Settlement Reports Online","openExternal":false,"url":"/core/publications/collections/dispute-settlement-reports-online"},{"id":"menu_item-0_cat-3_subCat-2_link-1","label":"Flip it Open","openExternal":false,"url":"/core/publications/collections/flip-it-open"},{"id":"menu_item-0_cat-3_subCat-2_link-2","label":"Hemingway Letters","openExternal":false,"url":"/core/publications/collections/hemingway-letters"},{"id":"menu_item-0_cat-3_subCat-2_link-3","label":"Shakespeare Survey","openExternal":false,"url":"/core/publications/collections/shakespeare-survey"},{"id":"menu_item-0_cat-3_subCat-2_link-4","label":"Stahl Online","openExternal":false,"url":"/core/publications/collections/stahl-online"},{"id":"menu_item-0_cat-3_subCat-2_link-5","label":"The Correspondence of Isaac Newton","openExternal":false,"url":"/core/publications/collections/the-correspondence-of-isaac-newton"}]}]},{"id":"menu_item-0_cat-4","label":"Elements","openExternal":null,"url":"/core/publications/elements","menuTopics":[{"id":"menu_item-0_cat-4_subCat-0","label":"Explore","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-4_subCat-0_link-0","label":"About Elements","openExternal":false,"url":"/core/publications/elements"},{"id":"menu_item-0_cat-4_subCat-0_link-1","label":"Elements series","openExternal":false,"url":"/core/publications/elements/cambridge-elements-series"},{"id":"menu_item-0_cat-4_subCat-0_link-2","label":"Open access Elements","openExternal":false,"url":"/core/publications/elements/published-elements?aggs%5BopenAccess%5D%5Bfilters%5D=7275BA1E84CA769210167A6A66523B47&aggs%5BproductTypes%5D%5Bfilters%5D=ELEMENT&searchWithinIds=ECFD8F5C64F47F3F5A3D395C15B7C493"},{"id":"menu_item-0_cat-4_subCat-0_link-3","label":"New 
Elements","openExternal":false,"url":"/core/publications/elements/published-elements?aggs%5BproductTypes%5D%5Bfilters%5D=ELEMENT&aggs%5BproductDate%5D%5Bfilters%5D=Last%203%20months&searchWithinIds=ECFD8F5C64F47F3F5A3D395C15B7C493"}]},{"id":"menu_item-0_cat-4_subCat-1","label":"Subjects (A-E)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-4_subCat-1_link-0","label":"Anthropology","openExternal":false,"url":"/core/elements/subject/Anthropology/2E44A5AF2838E017617A26DD79FAEAEE"},{"id":"menu_item-0_cat-4_subCat-1_link-1","label":"Archaeology","openExternal":false,"url":"/core/elements/subject/Archaeology/63A50B5368A9F97F8AA2D6AB965B5F4C"},{"id":"menu_item-0_cat-4_subCat-1_link-2","label":"Classical Studies","openExternal":false,"url":"/core/elements/subject/Classical%20Studies/DDC63B7F5792FE2A95D1FB15F76E3F42"},{"id":"menu_item-0_cat-4_subCat-1_link-3","label":"Computer Science","openExternal":false,"url":"/core/elements/subject/Computer%20Science/A57E10708F64FB69CE78C81A5C2A6555"},{"id":"menu_item-0_cat-4_subCat-1_link-4","label":"Drama, Theatre, Performance Studies","openExternal":false,"url":"/core/elements/subject/Drama,%20Theatre,%20Performance%20Studies/2825E4E39F2D641B36543EE80FB1DEA3"},{"id":"menu_item-0_cat-4_subCat-1_link-5","label":"Earth and Environmental Sciences","openExternal":false,"url":"/core/elements/subject/Earth%20and%20Environmental%20Sciences/F470FBF5683D93478C7CAE5A30EF9AE8"},{"id":"menu_item-0_cat-4_subCat-1_link-6","label":"Economics","openExternal":false,"url":"/core/elements/subject/Economics/FA44491F1F55F917C43E9832715B9DE7"},{"id":"menu_item-0_cat-4_subCat-1_link-7","label":"Education","openExternal":false,"url":"/core/elements/subject/Education/550D00F8DF590F2598CF7CC0038E24D1"},{"id":"menu_item-0_cat-4_subCat-1_link-8","label":"Engineering","openExternal":false,"url":"/core/elements/subject/Engineering/CCC62FE56DCC1D050CA1340C1CCF46F5"}]},{"id":"menu_item-0_cat-4_subCat-2","label":" Subjects 
(F-O)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-4_subCat-2_link-0","label":"Film, Media, Mass Communication","openExternal":false,"url":"/core/elements/subject/Film,%20Media,%20Mass%20Communication/4B91F10E834814A90CE718E7831E492F"},{"id":"menu_item-0_cat-4_subCat-2_link-1","label":"History","openExternal":false,"url":"/core/elements/subject/History/66BE42A30172E280FDE64F8EE2F485B0"},{"id":"menu_item-0_cat-4_subCat-2_link-2","label":"Language and Linguistics","openExternal":false,"url":"/core/elements/subject/Language%20and%20Linguistics/140D314098408C26BDF3009F7FF858E9"},{"id":"menu_item-0_cat-4_subCat-2_link-3","label":"Law","openExternal":false,"url":"/core/elements/subject/Law/7C9FB6788DD8D7E6696263BC774F4D5B"},{"id":"menu_item-0_cat-4_subCat-2_link-4","label":"Life Sciences","openExternal":false,"url":"/core/elements/subject/Life%20Sciences/E044EF2F61B601378786E9EDA901B2D5"},{"id":"menu_item-0_cat-4_subCat-2_link-5","label":"Literature","openExternal":false,"url":"/core/elements/subject/Literature/F2434ADC122145767C6C3B988A8E9BD5"},{"id":"menu_item-0_cat-4_subCat-2_link-6","label":"Management","openExternal":false,"url":"/core/elements/subject/Management/0EDCC0540639B06A5669BDEEF50C4CBE"},{"id":"menu_item-0_cat-4_subCat-2_link-7","label":"Mathematics","openExternal":false,"url":"/core/elements/subject/Mathematics/FA1467C44B5BD46BB8AA6E58C2252153"},{"id":"menu_item-0_cat-4_subCat-2_link-8","label":"Medicine","openExternal":false,"url":"/core/elements/subject/Medicine/66FF02B2A4F83D9A645001545197F287"},{"id":"menu_item-0_cat-4_subCat-2_link-9","label":"Music","openExternal":false,"url":"/core/elements/subject/Music/A370B5604591CB3C7F9AFD892DDF7BD1"}]},{"id":"menu_item-0_cat-4_subCat-3","label":" Subjects 
(P-Z)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-4_subCat-3_link-0","label":"Philosophy","openExternal":false,"url":"/core/elements/subject/Philosophy/2D1AC3C0E174F1F1A93F8C7DE19E0FAB"},{"id":"menu_item-0_cat-4_subCat-3_link-1","label":"Physics and Astronomy","openExternal":false,"url":"/core/elements/subject/Physics%20and%20Astronomy/DBFB610E9FC5E012C011430C0573CC06"},{"id":"menu_item-0_cat-4_subCat-3_link-2","label":"Politics and International Relations","openExternal":false,"url":"/core/elements/subject/Politics%20and%20International%20Relations/3BF83347E5E456DAC34F3FABFC8BBF4E"},{"id":"menu_item-0_cat-4_subCat-3_link-3","label":"Psychology","openExternal":false,"url":"/core/elements/subject/Psychology/21B42A72BA3E4CB0E3315E5B1B71B07F"},{"id":"menu_item-0_cat-4_subCat-3_link-4","label":"Religion","openExternal":false,"url":"/core/elements/subject/Religion/53E51D24FB488962B9364A2C4B45D1C3"},{"id":"menu_item-0_cat-4_subCat-3_link-5","label":"Sociology","openExternal":false,"url":"/core/elements/subject/Sociology/0E2CD53A93003DF17E52D753F6E90683"},{"id":"menu_item-0_cat-4_subCat-3_link-6","label":"Statistics and Probability","openExternal":false,"url":"/core/elements/subject/Statistics%20and%20Probability/3150B8B0D1B0B4E8DC17EC9EDFD9CA26"}]}]},{"id":"menu_item-0_cat-5","label":"Textbooks","openExternal":null,"url":"/core/publications/textbooks","menuTopics":[{"id":"menu_item-0_cat-5_subCat-0","label":"Explore","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-5_subCat-0_link-0","label":"Cambridge Higher Education","openExternal":false,"url":"/highereducation/"},{"id":"menu_item-0_cat-5_subCat-0_link-1","label":"Title list","openExternal":false,"url":"/highereducation/services/librarians/title-list"},{"id":"menu_item-0_cat-5_subCat-0_link-2","label":"New 
titles","openExternal":false,"url":"/highereducation/search?sortBy=publication_date&aggs=%24productDate%24Last%25206%2520months%3Atrue%26Last%252012%2520months%3Atrue%26Last%25203%2520years%3Atrue%26Over%25203%2520years%3Atrue%3B%3B&event=SE-AU_PREF"}]}]},{"id":"menu_item-0_cat-6","label":"Collections","openExternal":null,"url":"/core/publications/collections","menuTopics":[{"id":"menu_item-0_cat-6_subCat-0","label":"Book collections","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-6_subCat-0_link-0","label":"Cambridge Companions","openExternal":false,"url":"/core/publications/collections/cambridge-companions"},{"id":"menu_item-0_cat-6_subCat-0_link-1","label":"Cambridge Editions","openExternal":false,"url":"/core/publications/collections/cambridge-editions"},{"id":"menu_item-0_cat-6_subCat-0_link-2","label":"Cambridge Histories","openExternal":false,"url":"/core/publications/collections/cambridge-histories"},{"id":"menu_item-0_cat-6_subCat-0_link-3","label":"Cambridge Library Collection","openExternal":false,"url":"/core/publications/collections/cambridge-library-collection"},{"id":"menu_item-0_cat-6_subCat-0_link-4","label":"Cambridge Shakespeare","openExternal":false,"url":"/core/publications/collections/cambridge-shakespeare"},{"id":"menu_item-0_cat-6_subCat-0_link-5","label":"Cambridge Handbooks","openExternal":false,"url":"/core/publications/collections/cambridgehandbooks"}]},{"id":"menu_item-0_cat-6_subCat-1","label":" Book collections (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-6_subCat-1_link-0","label":"Dispute Settlement Reports Online","openExternal":false,"url":"/core/publications/collections/dispute-settlement-reports-online"},{"id":"menu_item-0_cat-6_subCat-1_link-1","label":"Flip it Open","openExternal":false,"url":"/core/publications/collections/flip-it-open"},{"id":"menu_item-0_cat-6_subCat-1_link-2","label":"Hemingway 
Letters","openExternal":false,"url":"/core/publications/collections/hemingway-letters"},{"id":"menu_item-0_cat-6_subCat-1_link-3","label":"Shakespeare Survey","openExternal":false,"url":"/core/publications/collections/shakespeare-survey"},{"id":"menu_item-0_cat-6_subCat-1_link-4","label":"Stahl Online","openExternal":false,"url":"/core/publications/collections/stahl-online"},{"id":"menu_item-0_cat-6_subCat-1_link-5","label":"The Correspondence of Isaac Newton","openExternal":false,"url":"/core/publications/collections/the-correspondence-of-isaac-newton"}]},{"id":"menu_item-0_cat-6_subCat-2","label":"Journal collections","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-6_subCat-2_link-0","label":"Cambridge Forum","openExternal":false,"url":"/core/publications/collections/cambridge-forum"},{"id":"menu_item-0_cat-6_subCat-2_link-1","label":"Cambridge Law Reports Collection","openExternal":false,"url":"/core/publications/collections/cambridge-law-reports-collection"},{"id":"menu_item-0_cat-6_subCat-2_link-2","label":"Cambridge Prisms","openExternal":false,"url":"/core/publications/collections/cambridge-prisms"},{"id":"menu_item-0_cat-6_subCat-2_link-3","label":"Research Directions","openExternal":false,"url":"/core/publications/collections/research-directions"}]},{"id":"menu_item-0_cat-6_subCat-3","label":"Series","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-6_subCat-3_link-0","label":"All series","openExternal":false,"url":"/core/publications/collections/series"}]}]},{"id":"menu_item-0_cat-7","label":"Partners","openExternal":null,"url":"/core/publications/publishing-partners","menuTopics":[{"id":"menu_item-0_cat-7_subCat-0","label":"Partners","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-7_subCat-0_link-0","label":"Agenda Publishing","openExternal":false,"url":"/core/publications/publishing-partners/agenda-publishing"},{"id":"menu_item-0_cat-7_subCat-0_link-1","label":"Amsterdam University 
Press","openExternal":false,"url":"/core/publications/publishing-partners/amsterdam-university-press"},{"id":"menu_item-0_cat-7_subCat-0_link-2","label":"Anthem Press","openExternal":false,"url":"/core/publications/publishing-partners/anthem-press"},{"id":"menu_item-0_cat-7_subCat-0_link-3","label":"Boydell & Brewer","openExternal":false,"url":"/core/publications/publishing-partners/boydell-brewer"},{"id":"menu_item-0_cat-7_subCat-0_link-4","label":"Bristol University Press","openExternal":false,"url":"/core/publications/publishing-partners/bristol-university-press"},{"id":"menu_item-0_cat-7_subCat-0_link-5","label":"Edinburgh University Press","openExternal":false,"url":"/core/publications/publishing-partners/edinburgh-university-press"},{"id":"menu_item-0_cat-7_subCat-0_link-6","label":"Emirates Center for Strategic Studies and Research","openExternal":false,"url":"/core/publications/publishing-partners/emirates-center"},{"id":"menu_item-0_cat-7_subCat-0_link-7","label":"Facet Publishing","openExternal":false,"url":"/core/publications/publishing-partners/facet-publishing"}]},{"id":"menu_item-0_cat-7_subCat-1","label":" Partners (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-0_cat-7_subCat-1_link-0","label":"Foundation Books","openExternal":false,"url":"/core/publications/publishing-partners/foundation-books"},{"id":"menu_item-0_cat-7_subCat-1_link-1","label":"Intersentia","openExternal":false,"url":"/core/publications/publishing-partners/intersentia"},{"id":"menu_item-0_cat-7_subCat-1_link-2","label":"ISEAS-Yusof Ishak Institute","openExternal":false,"url":"/core/publications/publishing-partners/iseas"},{"id":"menu_item-0_cat-7_subCat-1_link-3","label":"Jagiellonian University Press","openExternal":false,"url":"/core/publications/publishing-partners/jagiellonian-university-press"},{"id":"menu_item-0_cat-7_subCat-1_link-4","label":"Royal Economic 
Society","openExternal":false,"url":"/core/publications/publishing-partners/royal-economic-society"},{"id":"menu_item-0_cat-7_subCat-1_link-5","label":"Unisa Press","openExternal":false,"url":"/core/publications/publishing-partners/unisa-press"},{"id":"menu_item-0_cat-7_subCat-1_link-6","label":"The University of Adelaide Press","openExternal":false,"url":"/core/publications/publishing-partners/university-adelaide-press"},{"id":"menu_item-0_cat-7_subCat-1_link-7","label":"Wits University Press","openExternal":false,"url":"/core/publications/publishing-partners/wits-university-press"}]}]}]},{"id":"menu_item-1","label":"Services","openExternal":null,"url":"","isMegaMenu":true,"menuCats":[{"id":"menu_item-1_cat-0","label":"About","openExternal":null,"url":"/core/services/about/about","menuTopics":[{"id":"menu_item-1_cat-0_subCat-0","label":"About Cambridge Core","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-0_subCat-0_link-0","label":"About","openExternal":false,"url":"/core/services/about/about"},{"id":"menu_item-1_cat-0_subCat-0_link-1","label":"Accessibility","openExternal":false,"url":"/core/services/about/accessibility"},{"id":"menu_item-1_cat-0_subCat-0_link-2","label":"CrossMark policy","openExternal":false,"url":"/core/services/about/crossmark-policy"},{"id":"menu_item-1_cat-0_subCat-0_link-3","label":"Ethical Standards","openExternal":false,"url":"/core/services/about/ethical-standards"}]},{"id":"menu_item-1_cat-0_subCat-1","label":"Environment and sustainability","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-0_subCat-1_link-0","label":"Environment and sustainability","openExternal":false,"url":"/core/services/about/environment-and-sustainability"},{"id":"menu_item-1_cat-0_subCat-1_link-1","label":"Reducing print","openExternal":false,"url":"/core/services/about/reducing-print"},{"id":"menu_item-1_cat-0_subCat-1_link-2","label":"Journals moving to online 
only","openExternal":false,"url":"/core/services/about/journals-moving-to-online-only"}]},{"id":"menu_item-1_cat-0_subCat-2","label":"Guides","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-0_subCat-2_link-0","label":"User guides","openExternal":false,"url":"/core/services/about/user-guides"},{"id":"menu_item-1_cat-0_subCat-2_link-1","label":"User Guides and Videos","openExternal":false,"url":"/core/services/about/user-guides-and-videos"},{"id":"menu_item-1_cat-0_subCat-2_link-2","label":"Support Videos","openExternal":false,"url":"/core/services/about/support-videos"},{"id":"menu_item-1_cat-0_subCat-2_link-3","label":"Training","openExternal":false,"url":"/core/services/about/training"}]},{"id":"menu_item-1_cat-0_subCat-3","label":"Help","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-0_subCat-3_link-0","label":"Cambridge Core help","openExternal":false,"url":"https://corehelp.cambridge.org/"},{"id":"menu_item-1_cat-0_subCat-3_link-1","label":"Contact us","openExternal":false,"url":"https://corehelp.cambridge.org/hc/en-gb/p/contact-information"},{"id":"menu_item-1_cat-0_subCat-3_link-2","label":"Technical support","openExternal":false,"url":"https://corehelp.cambridge.org/hc/en-gb/requests/new"}]}]},{"id":"menu_item-1_cat-1","label":"Agents","openExternal":null,"url":"/core/services/agents/services-for-agents","menuTopics":[{"id":"menu_item-1_cat-1_subCat-0","label":"Services for agents","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-1_subCat-0_link-0","label":"Services for agents","openExternal":false,"url":"/core/services/agents/services-for-agents"},{"id":"menu_item-1_cat-1_subCat-0_link-1","label":"Journals for agents","openExternal":false,"url":"/core/services/agents/journals-for-agents"},{"id":"menu_item-1_cat-1_subCat-0_link-2","label":"Books for agents","openExternal":false,"url":"/core/services/agents/books-for-agents"},{"id":"menu_item-1_cat-1_subCat-0_link-3","label":"Price 
list","openExternal":false,"url":"/core/services/agents/price-list"}]}]},{"id":"menu_item-1_cat-2","label":"Authors","openExternal":null,"url":"/core/services/authors/authors","menuTopics":[{"id":"menu_item-1_cat-2_subCat-0","label":"Journals","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-2_subCat-0_link-0","label":"Journals","openExternal":false,"url":"/core/services/authors/journals"},{"id":"menu_item-1_cat-2_subCat-0_link-1","label":"Journal publishing statistics","openExternal":false,"url":"/core/services/authors/journal-publishing-statistics"},{"id":"menu_item-1_cat-2_subCat-0_link-2","label":"Corresponding author","openExternal":false,"url":"/core/services/authors/corresponding-author"},{"id":"menu_item-1_cat-2_subCat-0_link-3","label":"Seeking permission to use copyrighted material","openExternal":false,"url":"/core/services/authors/seeking-permission-to-use-copyrighted-material"},{"id":"menu_item-1_cat-2_subCat-0_link-4","label":"Publishing supplementary material","openExternal":false,"url":"/core/services/authors/publishing-supplementary-material"},{"id":"menu_item-1_cat-2_subCat-0_link-5","label":"Writing an effective abstract","openExternal":false,"url":"/core/services/authors/writing-an-effective-abstract"},{"id":"menu_item-1_cat-2_subCat-0_link-6","label":"Journal production - FAQs","openExternal":false,"url":"/core/services/authors/journal-production-faqs"}]},{"id":"menu_item-1_cat-2_subCat-1","label":"Journals (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-2_subCat-1_link-0","label":"Author affiliations","openExternal":false,"url":"/core/services/authors/author-affiliations"},{"id":"menu_item-1_cat-2_subCat-1_link-1","label":"Co-reviewing policy","openExternal":false,"url":"/core/services/authors/co-reviewing-policy"},{"id":"menu_item-1_cat-2_subCat-1_link-2","label":"Digital Author Publishing Agreement - 
FAQs","openExternal":false,"url":"/core/services/authors/digital-author-publishing-agreement-faqs"},{"id":"menu_item-1_cat-2_subCat-1_link-3","label":"Anonymising your manuscript","openExternal":false,"url":"/core/services/authors/anonymising-your-manuscript"},{"id":"menu_item-1_cat-2_subCat-1_link-4","label":"Publishing open access","openExternal":false,"url":"/core/services/authors/publishing-open-access"},{"id":"menu_item-1_cat-2_subCat-1_link-5","label":"Converting your article to open access","openExternal":false,"url":"/core/services/authors/converting-your-article-to-open-access"},{"id":"menu_item-1_cat-2_subCat-1_link-6","label":"Publishing Open Access - webinars","openExternal":false,"url":"/core/services/authors/publishing-open-access-webinars"}]},{"id":"menu_item-1_cat-2_subCat-2","label":"Journals (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-2_subCat-2_link-0","label":"Preparing and submitting your paper","openExternal":false,"url":"/core/services/authors/preparing-and-submitting-your-paper"},{"id":"menu_item-1_cat-2_subCat-2_link-1","label":"Publishing an accepted paper","openExternal":false,"url":"/core/services/authors/publishing-an-accepted-paper"},{"id":"menu_item-1_cat-2_subCat-2_link-2","label":"Promoting your published paper","openExternal":false,"url":"/core/services/authors/promoting-your-published-paper"},{"id":"menu_item-1_cat-2_subCat-2_link-3","label":"Measuring impact","openExternal":false,"url":"/core/services/authors/measuring-impact"},{"id":"menu_item-1_cat-2_subCat-2_link-4","label":"Journals artwork guide","openExternal":false,"url":"/core/services/authors/journals-artwork-guide"},{"id":"menu_item-1_cat-2_subCat-2_link-5","label":"Using 
ORCID","openExternal":false,"url":"/core/services/authors/using-orcid"}]},{"id":"menu_item-1_cat-2_subCat-3","label":"Books","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-2_subCat-3_link-0","label":"Books","openExternal":false,"url":"/core/services/authors/books"},{"id":"menu_item-1_cat-2_subCat-3_link-1","label":"Marketing your book","openExternal":false,"url":"/core/services/authors/marketing-your-book"},{"id":"menu_item-1_cat-2_subCat-3_link-2","label":"Author guides for Cambridge Elements","openExternal":false,"url":"/core/services/authors/elements-user-guides"}]}]},{"id":"menu_item-1_cat-3","label":"Corporates","openExternal":null,"url":"/core/services/corporates/services-for-corporates","menuTopics":[{"id":"menu_item-1_cat-3_subCat-0","label":"Corporates","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-3_subCat-0_link-0","label":"Commercial reprints","openExternal":false,"url":"/core/services/corporates/commercial-reprints"},{"id":"menu_item-1_cat-3_subCat-0_link-1","label":"Advertising","openExternal":false,"url":"/core/services/corporates/advertising"},{"id":"menu_item-1_cat-3_subCat-0_link-2","label":"Sponsorship","openExternal":false,"url":"/core/services/corporates/sponsorship"},{"id":"menu_item-1_cat-3_subCat-0_link-3","label":"Book special sales","openExternal":false,"url":"/core/services/corporates/book-special-sales"},{"id":"menu_item-1_cat-3_subCat-0_link-4","label":"Contact us","openExternal":false,"url":"/core/services/corporates/contact-us"}]}]},{"id":"menu_item-1_cat-4","label":"Editors","openExternal":null,"url":"/core/services/editors/editors","menuTopics":[{"id":"menu_item-1_cat-4_subCat-0","label":"Information","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-4_subCat-0_link-0","label":"Journal development","openExternal":false,"url":"/core/services/editors/journal-development"},{"id":"menu_item-1_cat-4_subCat-0_link-1","label":"Peer review for 
editors","openExternal":false,"url":"/core/services/editors/peer-review-for-editors"},{"id":"menu_item-1_cat-4_subCat-0_link-2","label":"Open access for editors","openExternal":false,"url":"/core/services/editors/open-access-for-editors"},{"id":"menu_item-1_cat-4_subCat-0_link-3","label":"Policies and guidelines","openExternal":false,"url":"/core/services/editors/policies-and-guidelines"}]},{"id":"menu_item-1_cat-4_subCat-1","label":"Resources","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-4_subCat-1_link-0","label":"The editor's role","openExternal":false,"url":"/core/services/editors/the-editors-role"},{"id":"menu_item-1_cat-4_subCat-1_link-1","label":"Open research for editors","openExternal":false,"url":"/core/services/editors/open-research-for-editors"},{"id":"menu_item-1_cat-4_subCat-1_link-2","label":"Engagement and promotion","openExternal":false,"url":"/core/services/editors/engagement-and-promotion"},{"id":"menu_item-1_cat-4_subCat-1_link-3","label":"Blogging","openExternal":false,"url":"/core/services/editors/blogging"},{"id":"menu_item-1_cat-4_subCat-1_link-4","label":"Social media","openExternal":false,"url":"/core/services/editors/social-media"}]}]},{"id":"menu_item-1_cat-5","label":"Librarians","openExternal":null,"url":"/core/services/librarians/librarians","menuTopics":[{"id":"menu_item-1_cat-5_subCat-0","label":"Information","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-5_subCat-0_link-0","label":"Open Access for Librarians","openExternal":false,"url":"/core/services/librarians/open-access-for-librarians"},{"id":"menu_item-1_cat-5_subCat-0_link-1","label":"Transformative agreements","openExternal":false,"url":"https://www.cambridge.org/core/services/open-access-policies/read-and-publish-agreements"},{"id":"menu_item-1_cat-5_subCat-0_link-2","label":"Transformative Agreements - 
FAQs","openExternal":false,"url":"/core/services/librarians/transformative-agreements-faqs"},{"id":"menu_item-1_cat-5_subCat-0_link-3","label":"Evidence based acquisition","openExternal":false,"url":"/core/services/librarians/evidence-based-acquisition"},{"id":"menu_item-1_cat-5_subCat-0_link-4","label":"ebook news & updates","openExternal":false,"url":"/core/services/librarians/ebook-news-and-updates"},{"id":"menu_item-1_cat-5_subCat-0_link-5","label":"Cambridge libraries of the world podcast","openExternal":false,"url":"/core/services/librarians/cambridge-libraries-of-the-world-podcast"},{"id":"menu_item-1_cat-5_subCat-0_link-6","label":"Purchasing models","openExternal":false,"url":"/core/services/librarians/purchasing-models"},{"id":"menu_item-1_cat-5_subCat-0_link-7","label":"Journals Publishing Updates","openExternal":false,"url":"/core/services/librarians/journals-publishing-updates"}]},{"id":"menu_item-1_cat-5_subCat-1","label":"Products","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-5_subCat-1_link-0","label":"Cambridge frontlist","openExternal":false,"url":"/core/services/librarians/cambridge-frontlist"},{"id":"menu_item-1_cat-5_subCat-1_link-1","label":"Cambridge journals digital archive","openExternal":false,"url":"/core/services/librarians/cambridge-journals-digital-archive"},{"id":"menu_item-1_cat-5_subCat-1_link-2","label":"Hot topics","openExternal":false,"url":"/core/services/librarians/hot-topics"},{"id":"menu_item-1_cat-5_subCat-1_link-3","label":"Other digital products","openExternal":false,"url":"/core/services/librarians/other-digital-products"},{"id":"menu_item-1_cat-5_subCat-1_link-4","label":"Perpetual access products","openExternal":false,"url":"/core/services/librarians/perpetual-access-products"},{"id":"menu_item-1_cat-5_subCat-1_link-5","label":"Price list","openExternal":false,"url":"/core/services/librarians/price-list"},{"id":"menu_item-1_cat-5_subCat-1_link-6","label":"Developing country 
programme","openExternal":false,"url":"/core/services/librarians/developing-country-programme"},{"id":"menu_item-1_cat-5_subCat-1_link-7","label":"New content","openExternal":false,"url":"/core/services/librarians/new-content"}]},{"id":"menu_item-1_cat-5_subCat-2","label":"Tools","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-5_subCat-2_link-0","label":"Eligibility checker","openExternal":false,"url":"/core/eligibility-checker"},{"id":"menu_item-1_cat-5_subCat-2_link-1","label":"Transformative agreements","openExternal":false,"url":"https://www.cambridge.org/core/services/open-access-policies/read-and-publish-agreements"},{"id":"menu_item-1_cat-5_subCat-2_link-2","label":"KBART","openExternal":false,"url":"https://www.cambridge.org/core/services/librarians/kbart"},{"id":"menu_item-1_cat-5_subCat-2_link-3","label":"MARC records","openExternal":false,"url":"https://www.cambridge.org/core/services/librarians/marc-records"},{"id":"menu_item-1_cat-5_subCat-2_link-4","label":"Using MARCEdit for MARC records","openExternal":false,"url":"/core/services/librarians/using-marcedit-for-marc-records"},{"id":"menu_item-1_cat-5_subCat-2_link-5","label":"Inbound OpenURL specifications","openExternal":false,"url":"/core/services/librarians/inbound-openurl-specifications"},{"id":"menu_item-1_cat-5_subCat-2_link-6","label":"COUNTER report types","openExternal":false,"url":"/core/services/librarians/counter-report-types"}]},{"id":"menu_item-1_cat-5_subCat-3","label":"Resources","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-5_subCat-3_link-0","label":"Catalogues and resources","openExternal":false,"url":"/core/services/librarians/catalogues-and-resources"},{"id":"menu_item-1_cat-5_subCat-3_link-1","label":"Making the most of your 
EBA","openExternal":false,"url":"/core/services/librarians/making-the-most-of-your-eba"},{"id":"menu_item-1_cat-5_subCat-3_link-2","label":"Posters","openExternal":false,"url":"/core/services/librarians/posters"},{"id":"menu_item-1_cat-5_subCat-3_link-3","label":"Leaflets and brochures","openExternal":false,"url":"/core/services/librarians/leaflets-and-brochures"},{"id":"menu_item-1_cat-5_subCat-3_link-4","label":"Additional resources","openExternal":false,"url":"/core/services/librarians/additional-resources"},{"id":"menu_item-1_cat-5_subCat-3_link-5","label":"Find my sales contact","openExternal":false,"url":"/core/services/librarians/find-my-sales-contact"},{"id":"menu_item-1_cat-5_subCat-3_link-6","label":"Webinars","openExternal":false,"url":"/core/services/librarians/webinars"},{"id":"menu_item-1_cat-5_subCat-3_link-7","label":"Read and publish resources","openExternal":false,"url":"/core/services/librarians/read-and-publish-resources"}]}]},{"id":"menu_item-1_cat-6","label":"Peer review","openExternal":null,"url":"/core/services/peer-review/peer-review","menuTopics":[{"id":"menu_item-1_cat-6_subCat-0","label":"Peer review","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-6_subCat-0_link-0","label":"How to peer review journal articles","openExternal":false,"url":"/core/services/peer-review/how-to-peer-review-journal-articles"},{"id":"menu_item-1_cat-6_subCat-0_link-1","label":"How to peer review book proposals","openExternal":false,"url":"/core/services/peer-review/how-to-peer-review-book-proposals"},{"id":"menu_item-1_cat-6_subCat-0_link-2","label":"How to peer review Registered Reports","openExternal":false,"url":"/core/services/peer-review/how-to-peer-review-registered-reports"},{"id":"menu_item-1_cat-6_subCat-0_link-3","label":"Peer review FAQs","openExternal":false,"url":"/core/services/peer-review/peer-review-faqs"},{"id":"menu_item-1_cat-6_subCat-0_link-4","label":"Ethics in peer 
review","openExternal":false,"url":"/core/services/peer-review/ethics-in-peer-review"},{"id":"menu_item-1_cat-6_subCat-0_link-5","label":"Online peer review systems","openExternal":false,"url":"/core/services/peer-review/online-peer-review-systems"},{"id":"menu_item-1_cat-6_subCat-0_link-6","label":"A guide to Publons","openExternal":false,"url":"/core/services/peer-review/a-guide-to-publons"}]}]},{"id":"menu_item-1_cat-7","label":"Publishing ethics","openExternal":null,"url":"/core/services/publishing-ethics/publishing-ethics","menuTopics":[{"id":"menu_item-1_cat-7_subCat-0","label":"Journals ","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-7_subCat-0_link-0","label":"Publishing ethics guidelines for journals","openExternal":false,"url":"/core/services/publishing-ethics/publishing-ethics-guidelines-journals"},{"id":"menu_item-1_cat-7_subCat-0_link-1","label":"Core editorial policies for journals","openExternal":false,"url":"/core/services/publishing-ethics/core-editorial-policies-journals"},{"id":"menu_item-1_cat-7_subCat-0_link-2","label":"Authorship and contributorship for journals","openExternal":false,"url":"/core/services/publishing-ethics/authorship-and-contributorship-journals"},{"id":"menu_item-1_cat-7_subCat-0_link-3","label":"Affiliations for journals","openExternal":false,"url":"/core/services/publishing-ethics/affiliations-journals"},{"id":"menu_item-1_cat-7_subCat-0_link-4","label":"Research ethics for journals","openExternal":false,"url":"/core/services/publishing-ethics/research-ethics-journals"},{"id":"menu_item-1_cat-7_subCat-0_link-5","label":"Competing interests and funding for journals","openExternal":false,"url":"/core/services/publishing-ethics/competing-interests-and-funding-journals"}]},{"id":"menu_item-1_cat-7_subCat-1","label":"Journals (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-7_subCat-1_link-0","label":"Data and supporting evidence for 
journals","openExternal":false,"url":"/core/services/publishing-ethics/data-and-supporting-evidence-for-journals"},{"id":"menu_item-1_cat-7_subCat-1_link-1","label":"Misconduct for journals","openExternal":false,"url":"/core/services/publishing-ethics/misconduct-journals"},{"id":"menu_item-1_cat-7_subCat-1_link-2","label":"Corrections, retractions and removals for journals","openExternal":false,"url":"/core/services/publishing-ethics/corrections-retractions-and-removals-journals"},{"id":"menu_item-1_cat-7_subCat-1_link-3","label":"Versions and adaptations for journals","openExternal":false,"url":"/core/services/publishing-ethics/versions-and-adaptations-journals"},{"id":"menu_item-1_cat-7_subCat-1_link-4","label":"Libel, defamation and freedom of expression","openExternal":false,"url":"/core/services/publishing-ethics/libel-defamation-and-freedom-of-expression"},{"id":"menu_item-1_cat-7_subCat-1_link-5","label":"Business ethics journals","openExternal":false,"url":"/core/services/publishing-ethics/business-ethics-journals"}]},{"id":"menu_item-1_cat-7_subCat-2","label":"Books","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-7_subCat-2_link-0","label":"Publishing ethics guidelines for books","openExternal":false,"url":"/core/services/publishing-ethics/publishing-ethics-guidelines-books"},{"id":"menu_item-1_cat-7_subCat-2_link-1","label":"Core editorial policies for books","openExternal":false,"url":"/core/services/publishing-ethics/core-editorial-policies-books"},{"id":"menu_item-1_cat-7_subCat-2_link-2","label":"Authorship and contributorship for books","openExternal":false,"url":"/core/services/publishing-ethics/authorship-and-contributorship-books"},{"id":"menu_item-1_cat-7_subCat-2_link-3","label":"Affiliations for books","openExternal":false,"url":"/core/services/publishing-ethics/affiliations-books"},{"id":"menu_item-1_cat-7_subCat-2_link-4","label":"Research ethics for 
books","openExternal":false,"url":"/core/services/publishing-ethics/research-ethics-books"},{"id":"menu_item-1_cat-7_subCat-2_link-5","label":"Competing interests and funding for books","openExternal":false,"url":"/core/services/publishing-ethics/competing-interests-and-funding-books"}]},{"id":"menu_item-1_cat-7_subCat-3","label":"Books (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-7_subCat-3_link-0","label":"Data and supporting evidence for books","openExternal":false,"url":"/core/services/publishing-ethics/data-and-supporting-evidence-books"},{"id":"menu_item-1_cat-7_subCat-3_link-1","label":"Misconduct for books","openExternal":false,"url":"/core/services/publishing-ethics/misconduct-books"},{"id":"menu_item-1_cat-7_subCat-3_link-2","label":"Corrections, retractions and removals for books","openExternal":false,"url":"/core/services/publishing-ethics/corrections-retractions-and-removals-books"},{"id":"menu_item-1_cat-7_subCat-3_link-3","label":"Versions and adaptations for books","openExternal":false,"url":"/core/services/publishing-ethics/versions-and-adaptations-books"},{"id":"menu_item-1_cat-7_subCat-3_link-4","label":"Libel, defamation and freedom of expression","openExternal":false,"url":"/core/services/publishing-ethics/libel-defamation-and-freedom-of-expression"},{"id":"menu_item-1_cat-7_subCat-3_link-5","label":"Business ethics books","openExternal":false,"url":"/core/services/publishing-ethics/business-ethics-books"}]}]},{"id":"menu_item-1_cat-8","label":"Publishing partners","openExternal":null,"url":"/core/services/publishing-partners/publishing-partnerships","menuTopics":[{"id":"menu_item-1_cat-8_subCat-0","label":"Publishing partners","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-8_subCat-0_link-0","label":"Publishing partnerships","openExternal":false,"url":"/core/services/publishing-partners/publishing-partnerships"},{"id":"menu_item-1_cat-8_subCat-0_link-1","label":"Partner 
books","openExternal":false,"url":"/core/services/publishing-partners/partner-books"},{"id":"menu_item-1_cat-8_subCat-0_link-2","label":"eBook publishing partnerships","openExternal":false,"url":"/core/services/publishing-partners/ebook-publishing-partnerships"},{"id":"menu_item-1_cat-8_subCat-0_link-3","label":"Journal publishing partnerships","openExternal":false,"url":"/core/services/publishing-partners/journal-publishing-partnerships"}]},{"id":"menu_item-1_cat-8_subCat-1","label":"Publishing partners (cont.)","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-1_cat-8_subCat-1_link-0","label":"Journals publishing","openExternal":false,"url":"/core/services/publishing-partners/journals-publishing"},{"id":"menu_item-1_cat-8_subCat-1_link-1","label":"Customer support","openExternal":false,"url":"/core/services/publishing-partners/customer-support"},{"id":"menu_item-1_cat-8_subCat-1_link-2","label":"Membership Services","openExternal":false,"url":"/core/services/publishing-partners/membership-services"},{"id":"menu_item-1_cat-8_subCat-1_link-3","label":"Our Team","openExternal":false,"url":"/core/services/publishing-partners/our-team"}]}]}]},{"id":"menu_item-2","label":"Open research","openExternal":null,"url":"","isMegaMenu":true,"menuCats":[{"id":"menu_item-2_cat-0","label":"Open access policies","openExternal":null,"url":"/core/services/open-research-policies/open-access-policies","menuTopics":[{"id":"menu_item-2_cat-0_subCat-0","label":"Open access policies","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-0_subCat-0_link-0","label":"Open research","openExternal":false,"url":"/core/services/open-research-policies/open-research"},{"id":"menu_item-2_cat-0_subCat-0_link-1","label":"Open access policies","openExternal":false,"url":"/core/services/open-research-policies/open-access-policies"},{"id":"menu_item-2_cat-0_subCat-0_link-2","label":"Cambridge University Press and Plan 
S","openExternal":false,"url":"/core/services/open-research-policies/cambridge-university-press-and-plan-s"},{"id":"menu_item-2_cat-0_subCat-0_link-3","label":"Text and data mining","openExternal":false,"url":"/core/services/open-research-policies/text-and-data-mining"},{"id":"menu_item-2_cat-0_subCat-0_link-4","label":"Preprint policy","openExternal":false,"url":"/core/services/open-research-policies/preprint-policy"},{"id":"menu_item-2_cat-0_subCat-0_link-5","label":"Social sharing","openExternal":false,"url":"/core/services/open-research-policies/social-sharing"}]},{"id":"menu_item-2_cat-0_subCat-1","label":"Journals","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-0_subCat-1_link-0","label":"Open access journals","openExternal":false,"url":"/core/services/open-research-policies/open-access-journals"},{"id":"menu_item-2_cat-0_subCat-1_link-1","label":"Gold open access journals","openExternal":false,"url":"/core/services/open-research-policies/gold-open-access-journals"},{"id":"menu_item-2_cat-0_subCat-1_link-2","label":"Transformative journals","openExternal":false,"url":"/core/services/open-research-policies/transformative-journals"},{"id":"menu_item-2_cat-0_subCat-1_link-3","label":"Green Open Access policy for journals","openExternal":false,"url":"/core/services/open-research-policies/green-open-access-policy-for-journals"},{"id":"menu_item-2_cat-0_subCat-1_link-4","label":"Transparent pricing policy for journals","openExternal":false,"url":"/core/services/open-research-policies/transparent-pricing-policy-for-journals"}]},{"id":"menu_item-2_cat-0_subCat-2","label":"Books and Elements","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-0_subCat-2_link-0","label":"Open access books","openExternal":false,"url":"/core/services/open-research-policies/open-access-books"},{"id":"menu_item-2_cat-0_subCat-2_link-1","label":"Gold open access 
books","openExternal":false,"url":"/core/services/open-research-policies/gold-open-access-books"},{"id":"menu_item-2_cat-0_subCat-2_link-2","label":"Green Open Access policy for books","openExternal":false,"url":"/core/services/open-research-policies/green-open-access-policy-for-books"},{"id":"menu_item-2_cat-0_subCat-2_link-3","label":"Open access Elements","openExternal":false,"url":"/core/services/open-research-policies/open-access-elements"}]}]},{"id":"menu_item-2_cat-1","label":"Open access publishing","openExternal":null,"url":"/core/services/open-access-publishing/open-access","menuTopics":[{"id":"menu_item-2_cat-1_subCat-0","label":"About open access","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-1_subCat-0_link-0","label":"Open research","openExternal":false,"url":"/core/services/open-access-publishing/open-research"},{"id":"menu_item-2_cat-1_subCat-0_link-1","label":"Open Access Week","openExternal":false,"url":"/core/services/open-access-publishing/open-access-week"},{"id":"menu_item-2_cat-1_subCat-0_link-2","label":"What is open access?","openExternal":false,"url":"/core/services/open-access-publishing/open-access"},{"id":"menu_item-2_cat-1_subCat-0_link-3","label":"Open access glossary","openExternal":false,"url":"/core/services/open-access-publishing/open-access-glossary"},{"id":"menu_item-2_cat-1_subCat-0_link-4","label":"Open access myths","openExternal":false,"url":"/core/services/open-access-publishing/open-access-myths"},{"id":"menu_item-2_cat-1_subCat-0_link-5","label":"Hybrid Open Access FAQs","openExternal":false,"url":"/core/services/open-access-publishing/hybrid-open-access-faqs"},{"id":"menu_item-2_cat-1_subCat-0_link-6","label":"Eligibility checker","openExternal":false,"url":"/core/eligibility-checker"}]},{"id":"menu_item-2_cat-1_subCat-1","label":"Open access resources","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-1_subCat-1_link-0","label":"Open access 
resources","openExternal":false,"url":"/core/services/open-access-publishing/open-access-resources"},{"id":"menu_item-2_cat-1_subCat-1_link-1","label":"Benefits of open access","openExternal":false,"url":"/core/services/open-access-publishing/benefits-of-open-access"},{"id":"menu_item-2_cat-1_subCat-1_link-2","label":"Creative commons licenses","openExternal":false,"url":"/core/services/open-access-publishing/creative-commons-licenses"},{"id":"menu_item-2_cat-1_subCat-1_link-3","label":"Funder policies and mandates","openExternal":false,"url":"/core/services/open-access-publishing/funder-policies-and-mandates"},{"id":"menu_item-2_cat-1_subCat-1_link-4","label":"Article type definitions","openExternal":false,"url":"/core/services/open-access-publishing/article-type-definitions"},{"id":"menu_item-2_cat-1_subCat-1_link-5","label":"Convert your article to Open Access","openExternal":false,"url":"/core/services/open-access-publishing/convert-your-article-to-open-access"},{"id":"menu_item-2_cat-1_subCat-1_link-6","label":"Open access video resources","openExternal":false,"url":"/core/services/open-access-publishing/open-access-video-resources"}]}]},{"id":"menu_item-2_cat-2","label":"Open research initiatives","openExternal":null,"url":"","menuTopics":[{"id":"menu_item-2_cat-2_subCat-0","label":"Research transparency","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-2_subCat-0_link-0","label":"Transparency and openness","openExternal":false,"url":"/core/services/open-research-initiatives/transparency-and-openness"},{"id":"menu_item-2_cat-2_subCat-0_link-1","label":"Open Practice Badges","openExternal":false,"url":"/core/services/open-research-initiatives/open-practice-badges"},{"id":"menu_item-2_cat-2_subCat-0_link-2","label":"OA organisations, initiatives & directories","openExternal":false,"url":"/core/services/open-research-initiatives/oa-organisations-initiatives-and-directories"},{"id":"menu_item-2_cat-2_subCat-0_link-3","label":"Registered 
Reports","openExternal":false,"url":"/core/services/open-research-initiatives/registered-reports"},{"id":"menu_item-2_cat-2_subCat-0_link-4","label":"Annotation for Transparent Inquiry (ATI)","openExternal":false,"url":"/core/services/open-research-initiatives/annotation-for-transparent-inquiry-ati"}]},{"id":"menu_item-2_cat-2_subCat-1","label":"Journal flips","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-2_subCat-1_link-0","label":"Open access journal flips","openExternal":false,"url":"/core/services/open-research-initiatives/open-access-journal-flips"},{"id":"menu_item-2_cat-2_subCat-1_link-1","label":"OA Journal Flip FAQs","openExternal":false,"url":"/core/services/open-research-initiatives/oa-journal-flip-faqs"}]},{"id":"menu_item-2_cat-2_subCat-2","label":"Flip it Open","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-2_subCat-2_link-0","label":"Flip it Open","openExternal":false,"url":"/core/services/open-research-initiatives/flip-it-open"},{"id":"menu_item-2_cat-2_subCat-2_link-1","label":"Flip it Open FAQs","openExternal":false,"url":"/core/services/open-research-initiatives/flip-it-open-faqs"}]}]},{"id":"menu_item-2_cat-3","label":"Open access funding","openExternal":null,"url":"/core/services/open-access-funding/funding-open-access-publication","menuTopics":[{"id":"menu_item-2_cat-3_subCat-0","label":"Open access funding","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-3_subCat-0_link-0","label":"Funding open access publication","openExternal":false,"url":"/core/services/open-access-funding/funding-open-access-publication"},{"id":"menu_item-2_cat-3_subCat-0_link-1","label":"Cambridge Open Equity Initiative","openExternal":false,"url":"/core/services/open-access-funding/cambridge-open-equity-initiative"}]}]},{"id":"menu_item-2_cat-4","label":" Cambridge Open 
Engage","openExternal":null,"url":"/core/services/cambridge-open-engage/cambridge-open-engage","menuTopics":[{"id":"menu_item-2_cat-4_subCat-0","label":" Cambridge Open Engage","openExternal":null,"url":"","menuLinks":[{"id":"menu_item-2_cat-4_subCat-0_link-0","label":"Cambridge Open Engage","openExternal":false,"url":"/core/services/cambridge-open-engage/cambridge-open-engage"},{"id":"menu_item-2_cat-4_subCat-0_link-1","label":"Partner With Us","openExternal":false,"url":"/core/services/cambridge-open-engage/engage-partner-with-us"},{"id":"menu_item-2_cat-4_subCat-0_link-2","label":"Branded Hubs","openExternal":false,"url":"/core/services/cambridge-open-engage/engage-branded-hubs"},{"id":"menu_item-2_cat-4_subCat-0_link-3","label":"Event Workspaces","openExternal":false,"url":"/core/services/cambridge-open-engage/engage-event-workspaces"},{"id":"menu_item-2_cat-4_subCat-0_link-4","label":"Partner Resources","openExternal":false,"url":"/core/services/cambridge-open-engage/engage-partner-resources"},{"id":"menu_item-2_cat-4_subCat-0_link-5","label":"APSA Preprints","openExternal":false,"url":"/core/services/cambridge-open-engage/engage-apsa-preprints"},{"id":"menu_item-2_cat-4_subCat-0_link-6","label":"APSA Preprints FAQs","openExternal":false,"url":"/core/services/cambridge-open-engage/engage-apsa-preprints-faqs"}]}]}]}]}}</script> </div> <platform-header id='platform-header-wc' platform='core' env='prod' institution-log-in-url='https://shibboleth.cambridge.org/CJOShibb2/index?app=https%3A%2F%2Fwww.cambridge.org%2Fcore%2Fshibboleth%3Fref%3D%2Fcore%2Fjournals%2Frobotica%2Farticle%2Frobotics-goes-prisma%2F5DA1E6B0701411F71E5FFC40F2E53847' manage-account-url='/core/login' base-url='/core/services/' class='platform-header-wc' search-phrase='' is-preview='false' hide-search='false' style="display: none" ></platform-header> <script> const platformHeader = $('#platform-header'); const platformHeaderWc = $('#platform-header-wc'); platformHeaderWc.prop('initialData', 
window.__PLATFORM_HEADER_DATA__);
// Once the header web component signals it is ready, swap it in for the
// static fallback header.
platformHeaderWc.on('initialized', function () {
  platformHeader.hide();
  platformHeaderWc.show();
});
// Build a hidden GET form targeting /core/search with the phrase in "q",
// so a header search performs a normal page navigation.
const createForm = function (searchPhrase) {
  const form = document.createElement('form');
  form.style.display = 'none';
  form.method = 'GET';
  form.action = '/core/search';
  const input = document.createElement('input');
  input.type = 'hidden';
  input.name = 'q';
  input.value = searchPhrase;
  form.appendChild(input);
  return form;
};
// Relay "search" events from the header web component to /core/search.
platformHeaderWc.on('search', function (e) {
  // detail may be absent or empty; keep the optional chain unbroken
  // (detail?.[0], not detail[0]) so a malformed event cannot throw here.
  const searchPhrase = e?.originalEvent?.detail?.[0]?.phrase;
  if (!searchPhrase) { return; }
  const form = createForm(searchPhrase);
  document.body.appendChild(form);
  // Submit only the form we just created. The previous $('form').submit()
  // fired a submit on every form in the document, not this one.
  form.submit();
  document.body.removeChild(form);
}); </script><div class="flash-message-container" role="alert"> <div class="flash-message"> <script> var AOP = AOP || {}; $( document ).ready(function() { if (AOP.onLoadfocusTriggered !== true) { if($(".alert-box").length) { AOP.enableKeyboardAccess($(".alert-box")); } AOP.onLoadfocusTriggered = true; } }); </script> <div id="ajaxMessages" class="ajaxMessages"></div> </div> </div> <div class="article-wrapper"> <link rel="preload" href="/core/page-component/06cd607.js" as="script"><link rel="preload" href="/core/page-component/20147ec.js" as="script"><link rel="preload" href="/core/page-component/css/styles.css?hash=44de98b" as="style"><link rel="preload" href="/core/page-component/722ace3.js" as="script"><link rel="preload" href="/core/page-component/css/app.css?hash=f1e6144" as="style"><link rel="preload" href="/core/page-component/6f4302c.js" as="script"><link rel="stylesheet" href="/core/page-component/css/styles.css?hash=44de98b"><link rel="stylesheet" href="/core/page-component/css/app.css?hash=f1e6144"> <div role="main" class="page-component"> <div data-server-rendered="true" id="__nuxt"><!----><div id="__layout"><div><div data-v-01274b1d><div style="display: none;" data-v-01274b1d> Hostname: page-component-586b7cd67f-gb8f7 Total loading time: 0 Render
date: 2024-11-23T19:20:22.433Z Has data issue: false hasContentIssue false </div> <div class="container-fluid breadcrumbs-wrapper" data-v-3692cf84 data-v-01274b1d><div class="row crumbs-row" data-v-3692cf84><div class="breadcrumbs-wrapper__list" data-v-3692cf84><div class="breadcrumbs-wrapper__list__wrapper" data-v-3692cf84><!----> <div role="navigation" class="breadcrumbs-wrapper__list__wrapper__crumbs" data-v-3692cf84><ul data-test-id="breadcrumbs" class="page-breadcrumbs" data-v-3692cf84><li class="page-breadcrumbs__item" data-v-3692cf84><!----><a href="/core" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-3692cf84><!----><span data-v-63dfaf6e>Home</span> <!----></a></li><li class="page-breadcrumbs__item" data-v-3692cf84><span aria-hidden="true" class="breadcrumbs-wrapper__arrow" data-v-3692cf84>></span><a href="/core/publications/journals" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-3692cf84><!----><span data-v-63dfaf6e>Journals</span> <!----></a></li><li class="page-breadcrumbs__item" data-v-3692cf84><span aria-hidden="true" class="breadcrumbs-wrapper__arrow" data-v-3692cf84>></span><a href="/core/journals/robotica" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-3692cf84><!----><span data-v-63dfaf6e>Robotica</span> <!----></a></li><li class="page-breadcrumbs__item" data-v-3692cf84><span aria-hidden="true" class="breadcrumbs-wrapper__arrow" data-v-3692cf84>></span><a href="/core/journals/robotica/firstview" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-3692cf84><!----><span data-v-63dfaf6e>FirstView</span> <!----></a></li><li class="page-breadcrumbs__item" data-v-3692cf84><span aria-hidden="true" class="breadcrumbs-wrapper__arrow" data-v-3692cf84>></span><span data-v-3692cf84>Robotics goes PRISMA</span></li></ul></div></div> <div class="language" data-v-3692cf84><ul class="language-switch" data-v-6b1118dd data-v-3692cf84><li aria-label="English" 
data-v-6b1118dd><span class="language-option current divider" data-v-6b1118dd>English</span></li><li aria-label="Français" data-v-6b1118dd><span role="button" tabindex="0" href="#" lang="fr" class="language-option" data-v-6b1118dd> Français </span></li></ul></div></div></div></div> <div class="container container__modified" data-v-01274b1d><!----> <!----> <div class="row" data-v-01274b1d><div role="complementary" aria-label="table of content" class="column__left" data-v-01274b1d><div class="col journal-container row" data-v-146270e8 data-v-01274b1d><img src="https://static.cambridge.org/covers/ROB_0_0_0/robotica.jpg" alt="" class="journal__image" data-v-146270e8> <a href="/core/journals/robotica" class="app-link journal__title app-link__text app-link--underlined" data-v-63dfaf6e data-v-146270e8><!----><span class="text" data-v-63dfaf6e>Robotica <!----></span> <!----></a> <hr aria-hidden="true" class="separator default" data-v-7036083a data-v-146270e8></div> <!----> <div id="toc" class="table-of-content" data-v-01274b1d><h2>Article contents</h2> <div id="toc-list-wrapper" class="table-of-content__wrapper"><ul id="toc-list" class="list"><li class="list__item"><a href="#sec0" class="list__item__link"><span class="toc-title">Abstract</span></a></li> <li class="list__item"><a href="#s1" class="list__item__link"><!----> <span><div class="toc-title">Introduction</div></span></a></li><li class="list__item"><a href="#s2" class="list__item__link"><!----> <span><div class="toc-title">Dynamic manipulation and locomotion</div></span></a></li><li class="list__item"><a href="#s3" class="list__item__link"><!----> <span><div class="toc-title">Aerial robotics</div></span></a></li><li class="list__item"><a href="#s4" class="list__item__link"><!----> <span><div class="toc-title">Physical human-robot interaction</div></span></a></li><li class="list__item"><a href="#s5" class="list__item__link"><!----> <span><div class="toc-title">AI and cognitive robotics</div></span></a></li><li 
class="list__item"><a href="#s6" class="list__item__link"><!----> <span><div class="toc-title">Industrial robotics</div></span></a></li><li class="list__item"><a href="#s7" class="list__item__link"><!----> <span><div class="toc-title">Medical robotics</div></span></a></li><li class="list__item"><a href="#s8" class="list__item__link"><!----> <span><div class="toc-title">Future Directions</div></span></a></li><li class="list__item"><a href="#s9" class="list__item__link"><!----> <span><div class="toc-title">Conclusion</div></span></a></li><li class="list__item"><a href="#s10" class="list__item__link"><!----> <span><div class="toc-title">Author contribution</div></span></a></li><li class="list__item"><a href="#s11" class="list__item__link"><!----> <span><div class="toc-title">Financial support</div></span></a></li><li class="list__item"><a href="#s12" class="list__item__link"><!----> <span><div class="toc-title">Competing interests</div></span></a></li><li class="list__item"><a href="#s13" class="list__item__link"><!----> <span><div class="toc-title">Ethical approval</div></span></a></li> <!----> <li class="list__item"><a href="#references-list" class="list__item__link"><span class="toc-title">References</span></a></li></ul></div></div></div> <div class="column__main" data-v-01274b1d><div class="row" data-v-01274b1d><div class="column__main__left" data-v-01274b1d><div id="maincontent" class="col" data-v-862424e6 data-v-01274b1d><!----> <hgroup data-v-862424e6><h1 data-v-862424e6>Robotics goes PRISMA</h1> <!----></hgroup> <!----> <!----> <!----> <div class="row part-of" data-v-862424e6><span class="part-of__label" data-v-862424e6> Part of: </span> <a href="/core/product/identifier/THE_40TH_ANNIVERSARY_OF_ROBOTICA/type/BESPOKE_COLLECTION" class="part-of__collection" data-v-f0b31360 data-v-862424e6> The 40th Anniversary of Robotica </a></div> <div class="row published-date" data-v-862424e6><p data-v-862424e6> Published online by Cambridge University Press: <strong 
data-v-862424e6>20 March 2024</strong></p></div> <!----> <!----> <div class="contributors-details" data-v-99f6eb26 data-v-862424e6><div class="row contributors" data-v-99f6eb26><div class="col" data-v-99f6eb26><div class="row contributor-type" data-v-792406ce data-v-99f6eb26><!----> <div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Mario%20Selvaggio&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Mario Selvaggio</span> <!----></a> <a target="_blank" href="https://orcid.org/0000-0002-2460-1914" data-test-orcid="Mario Selvaggio" class="app-link contributor-type__contributor__orcid app-link__icon app-link--" data-v-63dfaf6e data-v-792406ce><img src="data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz4KPHN2ZyB3aWR0aD0iMjIiIGhlaWdodD0iMjIiIHZlcnNpb249IjEuMSIgaWQ9IkxheWVyXzEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHg9IjBweCIgeT0iMHB4IgoJIHZpZXdCb3g9IjAgMCAyNTYgMjU2IiBzdHlsZT0iZW5hYmxlLWJhY2tncm91bmQ6bmV3IDAgMCAyNTYgMjU2OyIgeG1sOnNwYWNlPSJwcmVzZXJ2ZSI+CjxzdHlsZSB0eXBlPSJ0ZXh0L2NzcyI+Cgkuc3Qwe2ZpbGw6I0E2Q0UzOTt9Cgkuc3Qxe2ZpbGw6I0ZGRkZGRjt9Cjwvc3R5bGU+CjxwYXRoIGNsYXNzPSJzdDAiIGQ9Ik0yNTYsMTI4YzAsNzAuNy01Ny4zLDEyOC0xMjgsMTI4QzU3LjMsMjU2LDAsMTk4LjcsMCwxMjhDMCw1Ny4zLDU3LjMsMCwxMjgsMEMxOTguNywwLDI1Niw1Ny4zLDI1NiwxMjh6Ii8+CjxnPgoJPHBhdGggY2xhc3M9InN0MSIgZD0iTTg2LjMsMTg2LjJINzAuOVY3OS4xaDE1LjR2NDguNFYxODYuMnoiLz4KCTxwYXRoIGNsYXNzPSJzdDEiIGQ9Ik0xMDguOSw3OS4xaDQxLjZjMzkuNiwwLDU3LDI4LjMsNTcsNTMuNmMwLDI3LjUtMjEuNSw1My42LTU2LjgsNTMuNmgtNDEuOFY3OS4xeiBNMTI0LjMsMTcyLjRoMjQuNQoJCWMzNC45LDAsNDIuOS0yNi41LDQyLjktMzkuN2MwLTIxLjUtMTMuNy0zOS43LTQzLjctMzkuN2gtMjMuN1YxNzIuNHoiLz4KCTxwYXRoIGNsYXNzPSJzdDEiIGQ9Ik04OC43LDU2LjhjMCw1LjUtNC41LDEwLjEtMTAuMSwxMC4xYy01LjYsMC0xMC4xLTQuNi0xMC4xLTEwLjFjMC01LjYsNC41LTEwLjEsMTAuMS0xMC4xCgkJQzg0LjIsNDYuNyw4OC43LDUxLjMsODguNyw1Ni44eiIvPgo8L2c+
Cjwvc3ZnPgo=" alt="Open the ORCID record for Mario Selvaggio" class="app-icon icon orcid" data-v-d2c09870 data-v-63dfaf6e><!----> <span class="sr-only" data-v-63dfaf6e>[Opens in a new window]</span></a> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Rocco%20Moccia&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Rocco Moccia</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Pierluigi%20Arpenti&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Pierluigi Arpenti</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Riccardo%20Caccavale&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Riccardo Caccavale</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Fabio%20Ruggiero&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Fabio Ruggiero</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Jonathan%20Cacace&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Jonathan Cacace</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a 
href="/core/search?filters%5BauthorTerms%5D=Fanny%20Ficuciello&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Fanny Ficuciello</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Alberto%20Finzi&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Alberto Finzi</span> <!----></a> <!----> <span data-v-792406ce>,</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Vincenzo%20Lippiello&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Vincenzo Lippiello</span> <!----></a> <!----> <span data-v-792406ce> and</span></div><div class="contributor-type__contributor" data-v-792406ce><a href="/core/search?filters%5BauthorTerms%5D=Luigi%20Villani&eventCode=SE-AU" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-792406ce><!----><span data-v-63dfaf6e>Luigi Villani</span> <!----></a> <!----> <span data-v-792406ce></span></div> <a href="#" class="app-link app-link__text-icon app-link--secondary reverse" data-v-63dfaf6e data-v-792406ce><img 
src="data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTEiIGhlaWdodD0iNiIgdmlld0JveD0iMCAwIDExIDYiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CjxwYXRoIGZpbGwtcnVsZT0iZXZlbm9kZCIgY2xpcC1ydWxlPSJldmVub2RkIiBkPSJNNS41MDAwNiA2QzUuMzI4NDYgNiA1LjE2Mzk4IDUuOTMzMzkgNS4wNDI1MiA1LjgxNTAxTDAuMTg5NDQ4IDEuMDc3OEMtMC4wNjMxNzYzIDAuODMxMjU3IC0wLjA2MzE3NjMgMC40MzE0NTIgMC4xODk2MSAwLjE4NDkwOEMwLjQ0MjM5NiAtMC4wNjE2MzYgMC44NTIwNjIgLTAuMDYxNjM2IDEuMTA0NTIgMC4xODQ5MDhMNS41MDAwNiA0LjQ3NTc1TDkuODk1NiAwLjE4NDkwOEMxMC4xNDgyIC0wLjA2MTYzNiAxMC41NTc5IC0wLjA2MTYzNiAxMC44MTA1IDAuMTg0OTA4QzExLjA2MzEgMC40MzE0NTIgMTEuMDYzMSAwLjgzMTEgMTAuODEwNyAxLjA3NzhMNS45NTc2IDUuODE1MDFDNS44MzYxNCA1LjkzMzM5IDUuNjcxNjYgNiA1LjUwMDA2IDZaIiBmaWxsPSIjNzA3MDcwIi8+Cjwvc3ZnPgo=" alt="" class="app-icon icon arrow-down" data-v-d2c09870 data-v-63dfaf6e><span class="text" data-v-63dfaf6e>...Show all authors <!----></span> <!----></a></div> <!----></div> <div class="col-2 collapse-link" data-v-99f6eb26><a href="#authors-details" data-toggle="collapse" aria-expanded="false" aria-controls="authors-details" class="app-link collapsed app-link__text-icon app-link--secondary reverse" data-v-63dfaf6e data-v-99f6eb26><img src="data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTEiIGhlaWdodD0iNiIgdmlld0JveD0iMCAwIDExIDYiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CjxwYXRoIGZpbGwtcnVsZT0iZXZlbm9kZCIgY2xpcC1ydWxlPSJldmVub2RkIiBkPSJNNS41MDAwNiA2QzUuMzI4NDYgNiA1LjE2Mzk4IDUuOTMzMzkgNS4wNDI1MiA1LjgxNTAxTDAuMTg5NDQ4IDEuMDc3OEMtMC4wNjMxNzYzIDAuODMxMjU3IC0wLjA2MzE3NjMgMC40MzE0NTIgMC4xODk2MSAwLjE4NDkwOEMwLjQ0MjM5NiAtMC4wNjE2MzYgMC44NTIwNjIgLTAuMDYxNjM2IDEuMTA0NTIgMC4xODQ5MDhMNS41MDAwNiA0LjQ3NTc1TDkuODk1NiAwLjE4NDkwOEMxMC4xNDgyIC0wLjA2MTYzNiAxMC41NTc5IC0wLjA2MTYzNiAxMC44MTA1IDAuMTg0OTA4QzExLjA2MzEgMC40MzE0NTIgMTEuMDYzMSAwLjgzMTEgMTAuODEwNyAxLjA3NzhMNS45NTc2IDUuODE1MDFDNS44MzYxNCA1LjkzMzM5IDUuNjcxNjYgNiA1LjUwMDA2IDZaIiBmaWxsPSIjNzA3MDcwIi8+Cjwvc3ZnPgo=" alt="" class="app-icon icon arrow-down" data-v-d2c09870 
data-v-63dfaf6e><span class="text" data-v-63dfaf6e>Show author details <!----></span> <!----></a></div></div> <hr aria-hidden="true" class="separator default" data-v-7036083a data-v-99f6eb26> <dl id="authors-details" class="authors-details collapse" data-v-2edb8da6 data-v-99f6eb26><div data-test-author="Mario Selvaggio" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Mario Selvaggio*</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Rocco Moccia" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Rocco Moccia</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Pierluigi Arpenti" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Pierluigi Arpenti</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Riccardo Caccavale" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" 
data-v-2edb8da6>Riccardo Caccavale</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Fabio Ruggiero" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Fabio Ruggiero</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Jonathan Cacace" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Jonathan Cacace</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Fanny Ficuciello" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Fanny Ficuciello</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> 
</span></div></dd></div><div data-test-author="Alberto Finzi" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Alberto Finzi</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Vincenzo Lippiello" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Vincenzo Lippiello</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Luigi Villani" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Luigi Villani</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div><div data-test-author="Bruno Siciliano" class="row author" data-v-2edb8da6><dt class="col-12 col-sm-2 title" data-v-2edb8da6>Bruno Siciliano</dt> <dd class="col content d-inline d-sm-flex" data-v-2edb8da6><span class="content__title" data-v-2edb8da6>Affiliation:</span> <div class="d-sm-flex flex-column flex-sm-1 d-inline" data-v-2edb8da6><span data-v-2edb8da6><span 
data-v-2edb8da6>Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy</span> </span></div></dd></div> <div class="row" data-v-2edb8da6><dt class="col-sm-2 col-12 title" data-v-2edb8da6> * </dt> <dd class="col content" data-v-2edb8da6><div class="row" data-v-2edb8da6><div class="d-sm-flex d-inline flex-sm-1 flex-sm-wrap" data-v-2edb8da6><div class="d-inline" data-v-2edb8da6><span data-v-2edb8da6><div class="corresp"><span class="bold">Corresponding author:</span> Mario Selvaggio; Email: <a href="mailto:mario.selvaggio@unina.it">mario.selvaggio@unina.it</a></div></span></div></div></div></dd></div> <hr aria-hidden="true" class="separator default" data-v-7036083a data-v-2edb8da6></dl></div></div> <div id="app-tabs" class="tabs" data-v-1d90c6ce data-v-01274b1d><div id="app-tabs-wrapper" class="tabs__wrapper" data-v-1d90c6ce><div role="navigation" aria-label="tab navigation" class="container" data-v-1d90c6ce><a data-toggle="collapse" href="#appTabs" role="button" aria-expanded="false" aria-controls="appTabs" class="tabs__collapse collapsed d-sm-none d-print-none" data-v-1d90c6ce><span data-v-1d90c6ce></span> <span class="tabs-arrow-up" data-v-1d90c6ce></span> <span class="tabs-arrow-down" data-v-1d90c6ce></span></a> <ul id="appTabs" role="tablist" class="nav nav-tabs tabs__list collapse show" data-v-1d90c6ce><li role="none" class="tabs__tab" data-v-1d90c6ce><a aria-selected="true" href="#article-tab" role="tab" aria-controls="article-tab" tabindex="-1" class="active" data-v-1d90c6ce>Article </a></li><li role="none" class="tabs__tab" data-v-1d90c6ce><a href="#figures-tab" role="tab" aria-controls="figures-tab" tabindex="-1" data-v-1d90c6ce>Figures </a></li><li role="none" class="tabs__tab" data-v-1d90c6ce><a href="#metrics-tab" role="tab" aria-controls="metrics-tab" tabindex="-1" data-v-1d90c6ce>Metrics </a></li></ul></div></div> <div tabindex="0" role="region" aria-label="Tab content" class="tab-content" 
data-v-1d90c6ce><div id="article-tab" class="tab-pane active" data-v-43a4d572><div id="toc-mobile" class="table-of-content-mobile d-sm-none" data-v-43a4d572><a data-toggle="collapse" href="#toc-list-mobile" role="button" aria-expanded="false" class="toc-collapse-switch collapsed"><span class="default-heading">Article contents</span> <!----> <span class="toc-arrow-up"></span> <span class="toc-arrow-down"></span></a> <ul id="toc-list-mobile" class="list collapse"><li class="list__item"><a href="#sec0" class="list__item__link"><span class="toc-title">Abstract</span></a></li> <li class="list__item"><a href="#s1" class="list__item__link"><!----> <span>Introduction</span></a></li><li class="list__item"><a href="#s2" class="list__item__link"><!----> <span>Dynamic manipulation and locomotion</span></a></li><li class="list__item"><a href="#s3" class="list__item__link"><!----> <span>Aerial robotics</span></a></li><li class="list__item"><a href="#s4" class="list__item__link"><!----> <span>Physical human-robot interaction</span></a></li><li class="list__item"><a href="#s5" class="list__item__link"><!----> <span>AI and cognitive robotics</span></a></li><li class="list__item"><a href="#s6" class="list__item__link"><!----> <span>Industrial robotics</span></a></li><li class="list__item"><a href="#s7" class="list__item__link"><!----> <span>Medical robotics</span></a></li><li class="list__item"><a href="#s8" class="list__item__link"><!----> <span>Future Directions</span></a></li><li class="list__item"><a href="#s9" class="list__item__link"><!----> <span>Conclusion</span></a></li><li class="list__item"><a href="#s10" class="list__item__link"><!----> <span>Author contribution</span></a></li><li class="list__item"><a href="#s11" class="list__item__link"><!----> <span>Financial support</span></a></li><li class="list__item"><a href="#s12" class="list__item__link"><!----> <span>Competing interests</span></a></li><li class="list__item"><a href="#s13" class="list__item__link"><!----> 
<span>Ethical approval</span></a></li> <!----> <li class="list__item"><a href="#references-list" class="list__item__link"><span class="toc-title">References</span></a></li></ul></div> <div class="action-bar" data-v-43a4d572><div class="row items"><!----> <div class="app-dropdown d-print-none" data-v-fab090b8 data-v-113567da data-v-43a4d572><button aria-expanded="false" data-test-id="buttonSavePDFOptions" id="save-pdf-dropdown" class="app-button dropdown-menu-button app-button__text-icon app-button--secondary" data-v-2a038744 data-v-fab090b8><img src="/core/page-component/img/save-pdf-icon.080470e.svg" alt="" class="app-icon icon save-pdf" data-v-d2c09870 data-v-2a038744> <span class="text" data-v-2a038744>Save PDF</span></button> <div aria-labelledby="save-pdf-dropdown" class="app-dropdown__menu" style="display:none;" data-v-fab090b8><div class="pdf-buttons" data-v-fab090b8 data-v-113567da><a href="/core/services/aop-cambridge-core/content/view/5DA1E6B0701411F71E5FFC40F2E53847/S026357472400033Xa.pdf/robotics-goes-prisma.pdf" download="" role="link" class="app-link dropdown-item app-link__text-icon app-link--" data-v-63dfaf6e data-v-113567da><img src="/core/page-component/img/pdf-download-icon.c7fb40c.svg" alt="" class="app-icon icon pdf-download" data-v-d2c09870 data-v-63dfaf6e><span class="text" data-v-63dfaf6e>Save PDF (1 mb) <!----></span> <!----></a> <a target="_blank" href="/core/services/aop-cambridge-core/content/view/5DA1E6B0701411F71E5FFC40F2E53847/S026357472400033Xa.pdf/robotics-goes-prisma.pdf" role="link" class="app-link dropdown-item app-link__text-icon app-link--" data-v-63dfaf6e data-v-113567da><img src="/core/page-component/img/pdf-download-icon.c7fb40c.svg" alt="" class="app-icon icon pdf-download" data-v-d2c09870 data-v-63dfaf6e><span class="text" data-v-63dfaf6e>View PDF <!----></span> <span class="sr-only" data-v-63dfaf6e>[Opens in a new window]</span></a> <!----> <!----> <button aria-expanded="false" data-reveal-id="dropboxModal" role="button" 
class="app-button dropdown-item app-button__text-icon app-button--" data-v-2a038744 data-v-113567da><img src="/core/page-component/img/dropbox-icon.3d57046.svg" alt="" class="app-icon icon dropbox" data-v-d2c09870 data-v-2a038744> <span class="text" data-v-2a038744>Save to Dropbox</span></button> <button aria-expanded="false" data-reveal-id="googleDriveModal" role="button" class="app-button dropdown-item app-button__text-icon app-button--" data-v-2a038744 data-v-113567da><img src="/core/page-component/img/google-drive-icon.a50193b.svg" alt="" class="app-icon icon google-drive" data-v-d2c09870 data-v-2a038744> <span class="text" data-v-2a038744>Save to Google Drive</span></button> <button aria-expanded="false" data-reveal-id="kindleModal" data-location="landingPage" role="button" class="app-button dropdown-item app-button__text-icon app-button--" data-v-2a038744 data-v-113567da><img src="data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMzRweCIgaGVpZ2h0PSIzNHB4IiB2aWV3Qm94PSIwIDAgMzQgMzQiIHZlcnNpb249IjEuMSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayI+CiAgICA8dGl0bGU+REI4RjgzNzMtNDExMS00OTNCLUI0QzItQkY5MTYxMENBQ0MxPC90aXRsZT4KICAgIDxnIGlkPSJTdHlsZXNoZWV0IiBzdHJva2U9Im5vbmUiIHN0cm9rZS13aWR0aD0iMSIgZmlsbD0ibm9uZSIgZmlsbC1ydWxlPSJldmVub2RkIj4KICAgICAgICA8ZyB0cmFuc2Zvcm09InRyYW5zbGF0ZSgtMTIxOC4wMDAwMDAsIC03OTYuMDAwMDAwKSIgaWQ9IkJVVFRPTlMiPgogICAgICAgICAgICA8ZyB0cmFuc2Zvcm09InRyYW5zbGF0ZSgxMDAuMDAwMDAwLCA1OTcuMDAwMDAwKSI+CiAgICAgICAgICAgICAgICA8ZyBpZD0iS2luZGxlLWljb24iIHRyYW5zZm9ybT0idHJhbnNsYXRlKDExMTkuMDAwMDAwLCAyMDAuMDAwMDAwKSI+CiAgICAgICAgICAgICAgICAgICAgPGNpcmNsZSBpZD0iT3ZhbCIgc3Ryb2tlPSIjNTk1OTU5IiBjeD0iMTYiIGN5PSIxNiIgcj0iMTYiPjwvY2lyY2xlPgogICAgICAgICAgICAgICAgICAgIDxwb2x5Z29uIGlkPSJGaWxsLTEiIGZpbGw9IiM1OTU5NTkiIHBvaW50cz0iMTAgNCAxMCAyOCAxMi41MjI4OTY3IDI4IDEyLjUyMjg5NjcgMjAuMzY3MzQ2NCAxOS4yMTg0MTczIDI4IDIyLjU3NDE3MyAyOCAxNC42NDkzNTI1IDE4Ljk2NjQ3NzQgMjEuNTc3NzQwNS
AxMi4wNDAwODgzIDE4LjE0MDM2NDUgMTEuOTA4ODI5NyAxMi41MjI4OTY3IDE3LjUyNTYzMTIgMTIuNTIyODk2NyA0Ij48L3BvbHlnb24+CiAgICAgICAgICAgICAgICA8L2c+CiAgICAgICAgICAgIDwvZz4KICAgICAgICA8L2c+CiAgICA8L2c+Cjwvc3ZnPg==" alt="" class="app-icon icon kindle" data-v-d2c09870 data-v-2a038744> <span class="text" data-v-2a038744>Save to Kindle</span></button> <!----></div> <button aria-label="Close Save PDF dropdown" aria-expanded="false" class="app-button close-icon app-button__icon app-button--" data-v-2a038744 data-v-fab090b8><img src="/core/page-component/img/close-icon.194b28a.svg" alt="" class="app-icon icon close" data-v-d2c09870 data-v-2a038744> <!----></button></div></div> <div class="app-dropdown d-print-none share-dropdown" data-v-fab090b8 data-v-beffd87c data-v-43a4d572><button aria-expanded="false" data-test-id="buttonShareOptions" id="share-dropdown" class="app-button dropdown-menu-button app-button__text-icon app-button--secondary" data-v-2a038744 data-v-fab090b8><img src="/core/page-component/img/share-icon.cbcfad8.svg" alt="" class="app-icon icon share" data-v-d2c09870 data-v-2a038744> <span class="text" data-v-2a038744>Share</span></button> <div aria-labelledby="share-dropdown" class="app-dropdown__menu" style="display:none;" data-v-fab090b8><div aria-labelledby="dropdown-share-button" data-v-fab090b8 data-v-beffd87c><!----> <div class="social-share-container" data-v-fab090b8 data-v-beffd87c><!----></div></div> <button aria-label="Close Share dropdown" aria-expanded="false" class="app-button close-icon app-button__icon app-button--" data-v-2a038744 data-v-fab090b8><img src="/core/page-component/img/close-icon.194b28a.svg" alt="" class="app-icon icon close" data-v-d2c09870 data-v-2a038744> <!----></button></div></div> <button aria-expanded="false" data-test-id="buttonCiteOptions" data-prod-id="5DA1E6B0701411F71E5FFC40F2E53847" data-toggle="modal" href="#cite-modal" class="app-button cite-button d-print-none app-button__text-icon app-button--secondary export-citation-product" 
data-v-2a038744 data-v-2ce5f61d data-v-43a4d572><img src="/core/page-component/img/cite-icon.44eaaa4.svg" alt="" class="app-icon icon cite" data-v-d2c09870 data-v-2a038744> <span class="text" data-v-2a038744>Cite</span></button> <!----> <a target="_blank" href="https://s100.copyright.com/AppDispatchServlet?publisherName=CUP&publication=ROB&title=Robotics%20goes%20PRISMA&publicationDate=20%20March%202024&author=Mario%20Selvaggio%2C%20Rocco%20Moccia%2C%20Pierluigi%20Arpenti%2C%20Riccardo%20Caccavale%2C%20Fabio%20Ruggiero%2C%20Jonathan%20Cacace%2C%20Fanny%20Ficuciello%2C%20Alberto%20Finzi%2C%20Vincenzo%20Lippiello%2C%20Luigi%20Villani%2C%20Bruno%20Siciliano&copyright=%C2%A9%20The%20Author(s)%2C%202024.%20Published%20by%20Cambridge%20University%20Press&contentID=10.1017%2FS026357472400033X&startPage=1&endPage=28&orderBeanReset=True&volumeNum=&issueNum=&oa=" data-test-id="buttonRightLink" class="app-link rights-link d-print-none app-link__text-icon app-link--accent" data-v-63dfaf6e data-v-92ee52a2 data-v-43a4d572><img src="/core/page-component/img/rights-icon.d4a677c.svg" alt="" class="app-icon icon rights" data-v-d2c09870 data-v-63dfaf6e><span class="text" data-v-63dfaf6e>Rights & Permissions <!----></span> <span class="sr-only" data-v-63dfaf6e>[Opens in a new window]</span></a></div> <hr aria-hidden="true" class="separator default" data-v-7036083a></div> <div class="share-modal-overlay" style="display:none;" data-v-43a4d572><!----></div> <div data-spy="scroll" data-target="#toc" class="scrollspy-content" data-v-43a4d572><!----> <div id="sec0" class="col article-abstract sec" data-v-2fa8b348 data-v-43a4d572><div class="abstract-text-container" data-v-2fa8b348><div lang="en"><h2>Abstract</h2> <!----> <div class="abstract-content"><div class="abstract" data-abstract-type="normal"><p>In this article, we review the main results achieved by the research activities carried out at PRISMA Lab of the University of Naples Federico II where, for 35 years, an interdisciplinary team of 
experts developed robots that are ultimately useful to humans. We summarize the key contributions made in the last decade in the six research areas of dynamic manipulation and locomotion, aerial robotics, human-robot interaction, artificial intelligence and cognitive robotics, industrial robotics, and medical robotics. After a brief overview of each research field, the most significant methodologies and results are reported and discussed, highlighting their cross-disciplinary and translational aspects. Finally, the potential future research directions identified are discussed.</p></div></div> <hr aria-hidden="true" class="abstract-divider separator default" data-v-7036083a></div></div> <!----> <!----> <!----></div> <!----> <div class="keywords" data-v-86c27100 data-v-43a4d572><h2 data-v-86c27100>Keywords</h2> <div class="row keywords__pills" data-v-86c27100><a href="/core/search?filters[keywords]=aerial robotics" data-v-f0b31360 data-v-86c27100><span data-v-f0b31360 data-v-86c27100>aerial robotics</span></a><a href="/core/search?filters[keywords]=control of robotic systems" data-v-f0b31360 data-v-86c27100><span data-v-f0b31360 data-v-86c27100>control of robotic systems</span></a><a href="/core/search?filters[keywords]=legged robots" data-v-f0b31360 data-v-86c27100><span data-v-f0b31360 data-v-86c27100>legged robots</span></a><a href="/core/search?filters[keywords]=non-prehensile manipulation" data-v-f0b31360 data-v-86c27100><span data-v-f0b31360 data-v-86c27100>non-prehensile manipulation</span></a><a href="/core/search?filters[keywords]=surgical robots" data-v-f0b31360 data-v-86c27100><span data-v-f0b31360 data-v-86c27100>surgical robots</span></a><a href="/core/search?filters[keywords]=teleoperation" data-v-f0b31360 data-v-86c27100><span data-v-f0b31360 data-v-86c27100>teleoperation</span></a></div> <hr aria-hidden="true" class="separator default" data-v-7036083a data-v-86c27100></div> <!----> <dl class="article-details" data-v-6e32a161 data-v-43a4d572><div 
class="row" data-v-6e32a161><dt class="col-12 col-sm-3 col-md-2_5 title" data-v-6e32a161> Type </dt> <dd class="col content" data-v-6e32a161>Review Article</dd></div> <div class="row" data-v-6e32a161><dt class="col-12 col-sm-3 col-md-2_5 title" data-v-6e32a161> Information </dt> <dd class="col content" data-v-6e32a161><div class="content__journal" data-v-6e32a161><a href="/core/journals/robotica" class="app-link app-link__text app-link--underlined" data-v-63dfaf6e data-v-6e32a161><!----><span class="text" data-v-63dfaf6e>Robotica <!----></span> <!----></a> <span data-v-6e32a161> , <a href="/core/journals/robotica/firstview" class="app-link app-link__text app-link--underlined" data-v-63dfaf6e data-v-6e32a161><!----><span class="text" data-v-63dfaf6e>First View <!----></span> <!----></a></span> <!----> <span data-v-6e32a161>, pp. 1 - 28</span> <!----></div> <div class="doi-data" data-v-6e32a161><div data-v-6e32a161>DOI: <a target="_blank" href="https://doi.org/10.1017/S026357472400033X" class="app-link app-link__text app-link--accent" data-v-63dfaf6e data-v-6e32a161><!----><span class="text" data-v-63dfaf6e>https://doi.org/10.1017/S026357472400033X <!----></span> <span class="sr-only" data-v-63dfaf6e>[Opens in a new window]</span></a></div> <!----></div> <!----> <!----></dd></div> <!----> <!----> <div class="row" data-v-6e32a161><dt class="col-12 col-sm-3 col-md-2_5 title" data-v-6e32a161> Copyright </dt> <dd class="col content" data-v-6e32a161><div data-v-6e32a161> © The Author(s), 2024. 
Published by Cambridge University Press </div></dd></div></dl> <!----> <div id="content-container" class="content-container" data-v-43a4d572><div class="content-box"><div class="article review-article NLM"> <div class="body"> <div class="sec intro" data-magellan-destination="s1" id="s1"> <h2 class="A"><span class="label">1.</span> Introduction</h2> <p class="p"> Developing robots that are ultimately useful and acceptable to humans has always been one of the major motivations for research in robotics. Potentially, robots can alleviate humans from performing dangerous jobs or working in hazardous conditions. They can handle lifting heavy weights, toxic substances, and repetitive tasks. Inspired by this, in labs and research centers across the world, interdisciplinary teams of experts coordinate their everyday efforts to pursue the goal of developing intelligent robotic systems that fulfill this scope. It is their duty and dream to push the boundary of robotics as a science, overcoming the current theoretical and technological limits, and making robots work closer to humans in our everyday living spaces. In this article, we review the main results achieved in this direction during the last decade by the robotics research carried out at PRISMA Lab of the University of Naples Federico II. The lab has been active in robotics research for 35 years now, and its team is internationally recognized in the community for its achievements. 
Given this long-standing expertise, the research work carried out at PRISMA Lab is tied to a solid basis and aims to bring groundbreaking results that have far-reaching impacts.</p><section><div class="fig" data-magellan-destination="f1" id="f1"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig1.png?pub-status=live" class="aop-lazy-load-image" width="4270" height="3215" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig1.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 1.</span> Graphical representation of the six research areas and sub-areas dealt with within the PRISMA Lab at the University of Naples Federico II. This article proposes an overview of the main problems addressed in these fields and discusses potential future directions on the topics.</p> </div></div></section> <p class="p"> Over the years, the team effort has been directed mainly toward six rapidly growing areas (and related sub-areas) of robotics that are dynamic manipulation and locomotion, aerial robotics, physical human-robot interaction (HRI), artificial intelligence (AI) and cognitive robotics, industrial robotics, and medical robotics (see Fig. <a class="xref fig" href="#f1">1</a>). The six research areas listed above fulfill in different ways the primary scope of supporting humans in their daily activities. Advanced manipulation skills allow robots to naturally act in anthropic environments by exploiting available affordances that are typically designed for humans. In this context, dynamic and non-prehensile manipulation techniques allow robots to extend their manipulative capabilities as described in Sec. 
<a class="xref sec" href="#s2">2</a>. Surprisingly, many methodologies used for non-prehensile manipulation also apply to legged robot locomotion. Motivated by this, the same section provides insights from recent legged robotics research. Aerial robots have been developed to perform tasks in high altitudes and difficult-to-access scenarios that cannot be easily reached or are too dangerous for human operators. To this end, the capability of interacting with the environment was recently integrated into control frameworks for aerial robots as can be seen in Sec. <a class="xref sec" href="#s3">3</a>. Robots can support humans by substituting or by cooperating with them either proximally or remotely. In both cases, issues related to the interaction between a human and a robot may arise. As detailed in Sec. <a class="xref sec" href="#s4">4</a>, physical HRI techniques must be considered to guarantee a safe and dependable behavior of collaborative robots (or cobots), for example, by designing suitable control schemes for reactive collision avoidance, compliance, and task-based interaction. In addition, in both human-robot cooperation and autonomous task execution, robots exhibiting cognitive capabilities are beneficial. We tackle the issue of deploying robots in dynamic and human-populated environments by integrating AI-based methods with cognitive control frameworks into robotic systems to allow flexible execution, planning, and monitoring of structured tasks as proposed in Sec. <a class="xref sec" href="#s5">5</a>. The manipulation and AI methodologies were recently adopted in the field of industrial robotics by considering logistics as a main application as can be seen in Sec. <a class="xref sec" href="#s6">6</a>. In this case, intelligent robotic systems are deployed to alleviate human operators from the execution of tedious and repetitive tasks. 
Differently, in the medical field, robots are directly conceived and programmed to extend human capabilities by performing super-precise surgical operations or acting as limb substitutes as described in Sec. <a class="xref sec" href="#s7">7</a>.</p> <p class="p"> In the following sections, we report the main achievements in each of these areas, highlighting the adopted methodologies and the key contributions with respect to the state of the art on the topic. Finally, potential future research directions in each field are discussed in Sec. <a class="xref sec" href="#s8">8</a>. Thus, the main contributions of this paper can be listed as follows:</p><ul class="list nomark"> <li class="list-item"> <p class="p"><span class="label">•</span> We present a thorough review of the most recent work in the above-mentioned six research areas dealt with by the PRISMA Lab, highlighting the adopted methodologies and the key results achieved in the fields;</p> </li> <li class="list-item"> <p class="p"><span class="label">•</span> For each research area, we propose an overview of the field, reporting both seminal and state-of-the-art works, and identify potential future research directions on the topics.</p> </li> </ul> </div> <div class="sec other" data-magellan-destination="s2" id="s2"> <h2 class="A"><span class="label">2.</span> Dynamic manipulation and locomotion</h2> <p class="p"> The ways robots use to transport themselves or objects around share many similarities. Robots realize manipulation and locomotion tasks by physically establishing contacts and regulating the exchange of forces with the world around them [<a class="xref bibr" href="#ref1"><span class="show-for-sr">Reference Suomalainen, Karayiannidis and Kyrki</span>1</a>]. 
With the technological advancements in both sensing and actuation speed, it is now possible to manipulate an object speedily and achieve stable locomotion across challenging terrains [<a class="xref bibr" href="#ref2"><span class="show-for-sr">Reference Yang, Zhang, Zeng, Agrawal and Sreenath</span>2</a>]. In dynamic manipulation and locomotion, an important role is played by forces and accelerations, which are used together with kinematics, statics, and quasi-static forces to achieve the task. Dynamic non-prehensile manipulation of an object extends its feasible movements exploiting motion primitives such as rolling [<a class="xref bibr" href="#ref3"><span class="show-for-sr">Reference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano</span>3</a>], pushing [<a class="xref bibr" href="#ref4"><span class="show-for-sr">Reference Chai, Peng and Tsao</span>4</a>], throwing, and tossing [<a class="xref bibr" href="#ref5"><span class="show-for-sr">Reference Satici, Ruggiero, Lippiello and Siciliano</span>5</a>], that inherently use the dynamics of both the robot and the manipulated object [<a class="xref bibr" href="#ref6"><span class="show-for-sr">Reference Ruggiero, Lippiello and Siciliano</span>6</a>]. Non-prehensile manipulation, specifically juggling, exhibits connections with legged locomotion regarding the hybrid nature of the related dynamics, the zero-moment-point stability [<a class="xref bibr" href="#ref7"><span class="show-for-sr">Reference Sardain and Bessonnet</span>7</a>], and the dynamic balancing conditions [<a class="xref bibr" href="#ref8"><span class="show-for-sr">Reference Farid, Siciliano and Ruggiero</span>8</a>]. It was observed that the stability conditions for non-prehensile dynamic object manipulation and the support phase of a walking biped share the same set of equations. This fundamental concept can be leveraged to seamlessly transfer sensing, planning, and control frameworks developed for one field to the other. 
Among such control frameworks, energy-based control approaches can be exploited for both dynamic non-prehensile manipulation tasks and locomotion ones. The key role played by energy during biped locomotion was enlightened in passive-dynamic walking [<a class="xref bibr" href="#ref9"><span class="show-for-sr">Reference McGeer</span>9</a>]. Consequently, several control frameworks exploiting energy-related concepts were proposed through the years [<a class="xref bibr" href="#ref10"><span class="show-for-sr">Reference Holm and Spong</span>10</a>–<a class="xref bibr" href="#ref12"><span class="show-for-sr">Reference Spong, Holm and Lee</span>12</a>] to realize specific gaits with the sought features. Locomotion considered in the aforementioned papers occurs in ideal conditions, that is, in the absence of external forces acting on legs. On the other hand, the investigation of resilience to external disturbances has been a prominent focus over the years, encompassing both quadruped and biped robots. This emphasis stems from the crucial ability of legged robots to navigate challenging terrain, where the irregularity of the ground may result in an early impact of the foot, leading to external forces affecting the system [<a class="xref bibr" href="#ref13"><span class="show-for-sr">Reference Mao, Gao, Tian and Zhao</span>13</a>]. A momentum-based observer detecting the anticipated foot touchdown was presented in [<a class="xref bibr" href="#ref14"><span class="show-for-sr">Reference Bledt, Wensing, Ingersoll and Kim</span>14</a>] while disturbances applied on the center of mass only were considered in [<a class="xref bibr" href="#ref15"><span class="show-for-sr">Reference Fahmi, Mastalli, Focchi and Semini</span>15</a>], neglecting the presence of external forces acting on swing legs. 
While using an observer for external wrenches on the center of mass or stance feet can enhance locomotion on uneven terrains, it does not prevent the robot from falling after a significant impact on the swing leg. This collision results in a deviation of the foot from the planned motion, potentially causing the touchdown to occur far from the intended foothold. This, in turn, reduces the support polygon, destabilizing the robot. In severe cases, the swing leg might not touch the ground or collide with another leg, leading to a robot fall. Consequently, there is a need to estimate external forces acting on swing legs and compensate for these disturbances. In the following sections, we report an overview of the main achievements of the two research fields whereas Table <a class="xref table" href="#tblI">I</a> provides a summary of the recent contributions related to these aspects.</p><div class="table-wrap" data-magellan-destination="tblI" id="tblI"> <div class="caption"> <p class="p"><span class="label">Table I.</span> Summary of PRISMA Lab contributions in the field of dynamic manipulation and locomotion.</p> </div> <span> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png?pub-status=live" class="aop-lazy-load-image" width="956" height="442" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png" data-zoomable="true"></div> </span> </div> <div class="sec" data-magellan-destination="s2-1" id="s2-1"> <h3 class="B"><span class="label">2.1.</span> Dynamic non-prehensile manipulation</h3> <p class="p"> Manipulation pertains to making an intentional change in the environment or to objects that are being manipulated. 
When realized without completely restraining the object, manipulation is denoted as non-prehensile [<a class="xref bibr" href="#ref6"><span class="show-for-sr">Reference Ruggiero, Lippiello and Siciliano</span>6</a>]. The object is then subject to unilateral constraints and, in order to reach the goal, the dynamics both of the object and of the hand manipulating it, together with the related kinematics, static and quasi-static forces, must be exploited [<a class="xref bibr" href="#ref6"><span class="show-for-sr">Reference Ruggiero, Lippiello and Siciliano</span>6</a>]. The literature on the topic states that the conventional way to cope with a non-prehensile dynamic manipulation task is to split it into simpler subtasks, usually referred to as non-prehensile manipulation primitives, that is rolling, dynamic grasp, sliding, pushing, throwing, etc.</p><section><div class="fig" data-magellan-destination="f2" id="f2"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig2.png?pub-status=live" class="aop-lazy-load-image" width="4257" height="1913" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig2.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 2.</span> Tray-based and pushing non-prehensile object manipulation scenarios. Upper row: a robot is tasked with transporting an object placed on a tray-like end-effector along a predefined, fast trajectory while avoiding the relative sliding (a) [<a class="xref bibr" href="#ref20"><span class="show-for-sr">Reference Selvaggio, Garg, Ruggiero, Oriolo and Siciliano</span>20</a>]. 
The robot performs a linear transporting trajectory while opportunely inclining the tray to improve the robustness of the task performance (b) [<a class="xref bibr" href="#ref19"><span class="show-for-sr">Reference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano</span>19</a>]. Bottom row: an object is pushed by a mobile robot along a trajectory (c) [<a class="xref bibr" href="#ref16"><span class="show-for-sr">Reference Bertoncelli, Ruggiero and Sabattini</span>16</a>]. Multiple robots can push an object with minimal effort by optimally placing themselves around it (d) [<a class="xref bibr" href="#ref17"><span class="show-for-sr">Reference Bertoncelli, Selvaggio, Ruggiero and Sabattini</span>17</a>].</p> </div></div></section> <p class="p"> Seminal works carried out in this direction investigate the non-prehensile rolling manipulation problem, where a single object rolls on the surface of a controlled manipulator. In [<a class="xref bibr" href="#ref26"><span class="show-for-sr">Reference Ryu, Ruggiero and Lynch</span>26</a>], backstepping was used to derive a control technique to stabilize a disk-on-disk rolling manipulation system. The goal was to stabilize by controlling a circular object on the top of a circular hand in the vertical plane. The effect of shapes in the input-state linearization of the considered non-prehensile planar rolling dynamic manipulation systems was later investigated in [<a class="xref bibr" href="#ref40"><span class="show-for-sr">Reference Lippiello, Ruggiero and Siciliano</span>40</a>]. Given the shapes of both the object and the manipulator, a state transformation was found allowing the possibility to exploit linear controls to stabilize the system.</p> <p class="p"> In tray-based non-prehensile manipulation (see Fig. 
<a class="xref fig" href="#f2">2</a> – upper row), the tasks of interest for the robotic system are opposite: (1) reconfigure objects in the hand by allowing them to intentionally slide or roll in the right direction; (2) transport objects placed on the tray while preventing them from sliding and falling. In the first case, the pose reconfiguration of a spherical object rolling on a tray-shaped hand, which is in turn actuated by a robot manipulator, was investigated in [<a class="xref bibr" href="#ref3"><span class="show-for-sr">Reference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano</span>3</a>, <a class="xref bibr" href="#ref27"><span class="show-for-sr">Reference Serra, Ferguson, Ruggiero, Siniscalco, Petit, Lippiello and Siciliano</span>27</a>]: the control law is derived following an interconnection-and-damping-assignment passivity-based approach using a port-Hamiltonian (pH) dynamic model of the system. Full pose regulation of the sphere was achieved thanks to a purposely developed planner. In the second case, the objective is to prevent objects’ sliding induced by inertial forces while carrying the object from one place to another. Adaptive tray orientation was shown to help achieve higher linear accelerations during the tracking of a fast trajectory, minimizing the occurrence of object slipping. The idea behind this is to let the tray surface completely counteract the net force acting on the object. A quadratic program was used to compute the optimal robot manipulator torque control input to enforce non-sliding conditions for the object with adaptive tray orientation while also considering the system’s kinematic and dynamic constraints in [<a class="xref bibr" href="#ref21"><span class="show-for-sr">Reference Subburaman, Selvaggio and Ruggiero</span>21</a>]. 
Instead, keeping the tray in the upright configuration, a jerk-based model predictive non-sliding manipulation control was proposed in [<a class="xref bibr" href="#ref20"><span class="show-for-sr">Reference Selvaggio, Garg, Ruggiero, Oriolo and Siciliano</span>20</a>] for the same task showing superior performance: considering the rate-of-change of the joint torque as the output of the controller, a smooth torque control profile is obtained while allowing direct control of the contact forces. Tray-based non-prehensile manipulation was recently used to develop a shared control teleoperation framework for users to safely transport objects using a remotely located robot [<a class="xref bibr" href="#ref19"><span class="show-for-sr">Reference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano</span>19</a>]. The proposed shared control approach shapes the motion commands imparted by the user to the remote robot and automatically regulates the end-effector orientation to more robustly prevent the object from sliding over the tray. Tray-based non-prehensile manipulation with a mobile manipulator dynamically balancing objects on its end-effector without grasping them was presented in [<a class="xref bibr" href="#ref41"><span class="show-for-sr">Reference Heins and Schoellig</span>41</a>]. A whole-body constrained model predictive controller for a mobile manipulator that balances objects and avoids collisions was developed for the considered task. More recently, researchers have focused on fast slosh-free fluid transportation [<a class="xref bibr" href="#ref42"><span class="show-for-sr">Reference Muchacho, Laha, Figueredo and Haddadin</span>42</a>]. Here the goal was to generate slosh-free trajectories by controlling the pendulum model of the liquid surface with constrained quadratic program optimization to obtain valid control inputs. 
This online technique allowed the motion generator to be used for real-time non-prehensile slosh-free teleoperation of liquids [<a class="xref bibr" href="#ref43"><span class="show-for-sr">Reference Muchacho, Bien, Laha, Naceri, Figueredo and Haddadin</span>43</a>].</p> <p class="p"> In those cases in which the object is too heavy or too large to be grasped, pushing an object is a simple solution widely adopted by humans, and the same concept can be thus transferred to robots (see Fig. <a class="xref fig" href="#f2">2</a> – bottom row). A technique to manipulate an object with a non-holonomic mobile robot using the pushing non-prehensile manipulation primitive was presented in [<a class="xref bibr" href="#ref16"><span class="show-for-sr">Reference Bertoncelli, Ruggiero and Sabattini</span>16</a>]. Such a primitive involves unilateral constraints associated with the friction between the robot and the manipulated object. Violating this constraint produces the slippage of the object during the manipulation. A linear time-varying model predictive control was designed to properly include the unilateral constraint within the control action. The framework can be extended in the case of multi-robots: a task-oriented contact placement optimization strategy for object pushing that allows calculating optimal contact points minimizing the amplitude of forces required to execute the task was presented in [<a class="xref bibr" href="#ref17"><span class="show-for-sr">Reference Bertoncelli, Selvaggio, Ruggiero and Sabattini</span>17</a>].</p> <p class="p"> Many of the proposed methods handle flat objects with primitive geometric shapes moving quasi-statically on high-friction surfaces, yet they usually make use of complex analytical models or utilize specialized physics engines to predict the outcomes of various interactions. 
On the other hand, an experience-based approach, which does not require any explicit analytical model or the help of a physics engine was proposed in [<a class="xref bibr" href="#ref44"><span class="show-for-sr">Reference Meriçli, Veloso and Akın</span>44</a>] where a mobile robot simply experiments with pushable complex 3D real-world objects to observe and memorize their motion characteristics together with the associated motion uncertainties resulting from varying initial caster wheel orientations and potential contacts between the robot and the object. A probabilistic method for autonomous learning of an approximate dynamics model for these objects was presented in [<a class="xref bibr" href="#ref45"><span class="show-for-sr">Reference Novin, Yazdani, Merryweather and Hermans</span>45</a>]. In this method, the dynamic parameters were learned using a small dataset consisting of force and motion data from interactions between the robot and objects. Based on these concepts, a rearrangement algorithm that relies on only a few known straight-line pushes for some novel object and requires no analytical models, force sensors, or large training datasets was proposed in [<a class="xref bibr" href="#ref4"><span class="show-for-sr">Reference Chai, Peng and Tsao</span>4</a>]. 
The authors experimentally verified the performance of their algorithm by rearranging several types of objects by pushing them to any target planar pose.</p> <p class="p"> Research on other non-prehensile manipulation primitives further includes sliding (for pizza-baking applications) [<a class="xref bibr" href="#ref28"><span class="show-for-sr">Reference Gutiérrez-Giles, Ruggiero, Lippiello and Siciliano</span>28</a>], throwing [<a class="xref bibr" href="#ref5"><span class="show-for-sr">Reference Satici, Ruggiero, Lippiello and Siciliano</span>5</a>], stretching a deformable object [<a class="xref bibr" href="#ref29"><span class="show-for-sr">Reference Kim, Ruggiero, Lippiello, Siciliano, Siciliano and Ruggiero</span>29</a>], and related ones [<a class="xref bibr" href="#ref30"><span class="show-for-sr">Reference Ruggiero, Kim, Gutiérrez-Giles, Satici, Donaire, Cacace, Buonocore, Fontanelli, Lippiello, Siciliano, Gusikhin and Madani</span>30</a>, <a class="xref bibr" href="#ref31"><span class="show-for-sr">Reference Ruggiero, Petit, Serra, Satici, Cacace, Donaire, Ficuciello, Buonocore, Fontanelli, Lippiello, Villani and Siciliano</span>31</a>].</p> </div> <div class="sec" data-magellan-destination="s2-2" id="s2-2"> <h3 class="B"><span class="label">2.2.</span> Legged robotics</h3> <p class="p"> Motivated by the connection between bipedal locomotion and non-prehensile manipulation [<a class="xref bibr" href="#ref8"><span class="show-for-sr">Reference Farid, Siciliano and Ruggiero</span>8</a>], the methodology proposed initially in [<a class="xref bibr" href="#ref3"><span class="show-for-sr">Reference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano</span>3</a>] to achieve the stabilization of non-prehensile planar rolling manipulation tasks was subsequently extended to tackle the gait-generation problem of a simple <em class="italic">compass-like biped robot</em> in [<a class="xref bibr" href="#ref34"><span class="show-for-sr">Reference Arpenti, 
Ruggiero and Lippiello</span>34</a>]. The common control framework is based on a modification of the well-known <em class="italic">interconnection-and-damping-assignment passivity-based control</em> (IDA-PBC) of pH systems, where an appropriate parameterization of the inertia matrix was proposed to avoid the explicit solution of the matching partial differential equations (PDEs) arising during control synthesis. Due to the critical role played by energy exchange during walking, the methodology was profitably applied to passive-dynamic walking. Thanks to the novel control strategy, new gaits were generated, which are manifestly different from the passive gait. The result was a controlled planar walker moving manifestly slower or faster (depending on control tuning) than the open-loop system while preserving the system’s passivity due to the closed-loop pH structure.</p> <p class="p"> An alternative constructive methodology, improving some issues present in [<a class="xref bibr" href="#ref3"><span class="show-for-sr">Reference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano</span>3</a>], was proposed in [<a class="xref bibr" href="#ref35"><span class="show-for-sr">Reference Arpenti, Ruggiero and Lippiello</span>35</a>]. In line with the same problem, the effect of dissipative forces deployed in the controller on gait generation was investigated in [<a class="xref bibr" href="#ref36"><span class="show-for-sr">Reference Nacusse, Arpenti, Ruggiero and Lippiello</span>36</a>]. There, two alternative control methodologies exploiting dissipative forces, termed <em class="italic">simultaneous interconnection-and-damping-assignment passivity-based control</em> (SIDA-PBC) and <em class="italic">energy pumping-and-damping passivity-based control</em> (EPD-PBC), respectively, demonstrated better results in achieving slow gaits, characterized by small step lengths and large step periods, compared to the performance of the IDA-PBC. 
SIDA-PBC carries out the energy shaping and the damping injection simultaneously, thanks to dissipative forces in the desired dynamics, differently from IDA-PBC, where these two control actions are carried out in two distinct steps. On the other hand, EPD-PBC proved to be an efficient control strategy to face another control task belonging to the realm of legged locomotion, namely the <em class="italic">gait robustification</em> problem, that is, the enlargement of the basin of attraction of the limit cycle associated with the natural passive gait of the compass-like biped [<a class="xref bibr" href="#ref32"><span class="show-for-sr">Reference Arpenti, Donaire, Ruggiero and Lippiello</span>32</a>]. This was achieved by alternating energy injection and dissipation into/from the system to stabilize the walker at the target energy value corresponding to the natural gait. Moreover, the EPD-PBC methodology was also used with the IDA-PBC approach, showing that not only the natural passive gaits but also the gaits generated through energy shaping can be robustified using the proposed design [<a class="xref bibr" href="#ref32"><span class="show-for-sr">Reference Arpenti, Donaire, Ruggiero and Lippiello</span>32</a>]. This work was carried out within the <em class="italic">hybrid zero dynamics</em> (HZD) framework which also served as a starting point for the development of a tracking controller based on IDA-PBC able to guarantee the exponentially fast convergence of suitably defined output dynamics to the HZD manifold [<a class="xref bibr" href="#ref33"><span class="show-for-sr">Reference Arpenti, Donaire, Ruggiero and Lippiello</span>33</a>]. 
The proposed strategy conferred robustness concerning parametric uncertainties to the closed-loop system by assigning desired error dynamics described through the pH formalism, thus preserving passivity.</p> <p class="p"> On the quadrupedal locomotion side, an estimator of external disturbances independently acting on stance and swing legs was proposed in [<a class="xref bibr" href="#ref39"><span class="show-for-sr">Reference Morlando, Teimoorzadeh and Ruggiero</span>39</a>]. Based on the system’s momentum, the estimator was leveraged along with a suitable motion planner for the trajectory of the robot’s center of mass and an optimization problem based on the modulation of ground reaction forces in a whole-body control strategy. Such a control architecture allows the locomotion of a legged robot inside an unstructured environment where collisions could happen and where irregularities in the terrain cause disturbances on legs. When significant forces act on both the center of mass and the robot’s legs, momentum-based observers are insufficient. Therefore, the work in [<a class="xref bibr" href="#ref38"><span class="show-for-sr">Reference Morlando and Ruggiero</span>38</a>] proposed a “hybrid” observer, an estimator that combines a momentum-based observer for the angular term and an acceleration-based observer for the translational one, employing directly measurable values from the sensors. An approach based on two observers was also proposed in [<a class="xref bibr" href="#ref37"><span class="show-for-sr">Reference Morlando, Lippiello and Ruggiero</span>37</a>], where a framework to control a quadruped robot tethered to a visually impaired person was presented, as illustrated in Fig. <a class="xref fig" href="#f3">3</a> (left). 
Finally, in [<a class="xref bibr" href="#ref18"><span class="show-for-sr">Reference Morlando, Selvaggio and Ruggiero</span>18</a>], the problem of non-prehensile object transportation through a legged manipulator is faced, arriving at a perfect combination of the topics seen in this section. An alternative whole-body control architecture was devised to prevent the sliding of the object placed on the tray at the manipulator’s end-effector while retaining the quadruped robot balance during walking, as shown in Fig. <a class="xref fig" href="#f3">3</a> (right). Both contact forces between the tray and the object and between the legs and the ground were kept within their respective friction cones by solving a quadratic optimization problem while achieving the sought transportation task.</p><section><div class="fig" data-magellan-destination="f3" id="f3"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig3.png?pub-status=live" class="aop-lazy-load-image" width="4250" height="1385" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig3.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 3.</span> On the left, a quadruped robot is connected to a human through a leash. This scenario was tested in the Gazebo simulation environment emulating a guide dog helping a visually impaired person. In the middle, a legged manipulator transports an object placed on a tray-like end-effector while simultaneously preventing it from sliding. 
On the right, the model behind this task, where the object (red cube) is prevented from sliding by keeping contact forces (blue) inside the friction cones (green).</p> </div></div></section> </div> </div> <div class="sec other" data-magellan-destination="s3" id="s3"> <h2 class="A"><span class="label">3.</span> Aerial robotics</h2> <p class="p"> Aerial robotics has been consolidated in the last decade as a research topic of interest for modeling and control, perception, planning, manipulation, and design. As such, it constitutes an effective technological solution for various applications such as inspection and maintenance, search and rescue, transportation and delivery, monitoring and patrolling, or 3D mapping. The maturity level reached in this field has led to the rise of several applications of aerial robots, with a focus on high altitude and challenging access scenarios that human operators cannot easily reach. The time, risk, and cost associated with conventional solutions involving the deployment of heavy vehicles and infrastructures motivate the development of aerial robots capable of quickly reaching these workspaces and performing visual or contact inspection operations. The research community faced two main problems during the deployment of reliable autonomous aerial robots. Firstly, conventional Vertical Takeoff and Landing (VToL) devices, like multirotor Unmanned Aerial Vehicles (UAVs) with parallel axes, faced challenges due to underactuation, impacting stabilization and trajectory tracking. Commonly, a hierarchical controller [<a class="xref bibr" href="#ref46"><span class="show-for-sr">Reference Mahony and Hamel</span>46</a>, <a class="xref bibr" href="#ref47"><span class="show-for-sr">Reference Nonami, Kendoul, Suzuki and Wang</span>47</a>] addresses this with time-scale separation between linear and angular dynamics. 
Position and yaw angle of VToL UAVs are flat outputs [<a class="xref bibr" href="#ref48"><span class="show-for-sr">Reference Spica, Franchi, Oriolo, Bülthoff and Giordano</span>48</a>], allowing trajectory tracking and solving the underactuated problem. Secondly, as UAV aerodynamic models are complex, these require robust control designs. Most designs incorporated integral action to handle disturbances and cope with uncertainties (e.g., battery level). Adaptive controls [<a class="xref bibr" href="#ref49"><span class="show-for-sr">Reference Antonelli, Cataldi, Giordano, Chiaverini and Franchi</span>49</a>–<a class="xref bibr" href="#ref51"><span class="show-for-sr">Reference Roberts and Tayebi</span>51</a>], force observers [<a class="xref bibr" href="#ref52"><span class="show-for-sr">Reference Yüksel, Secchi, Bülthoff and Franchi</span>52</a>], and passivity-based controllers [<a class="xref bibr" href="#ref53"><span class="show-for-sr">Reference Egeland and Godhavn</span>53</a>] enhanced robustness. PH methods [<a class="xref bibr" href="#ref52"><span class="show-for-sr">Reference Yüksel, Secchi, Bülthoff and Franchi</span>52</a>] and passive backstepping [<a class="xref bibr" href="#ref54"><span class="show-for-sr">Reference Ha, Zuo, Choi and Lee</span>54</a>] were explored for improved control. 
For further exploration, comprehensive literature reviews can be found in [<a class="xref bibr" href="#ref55"><span class="show-for-sr">Reference Valvanis</span>55</a>, <a class="xref bibr" href="#ref56"><span class="show-for-sr">Reference Valvanis and Vachtsevanos</span>56</a>] among others.</p><div class="table-wrap" data-magellan-destination="tblII" id="tblII"> <div class="caption"> <p class="p"><span class="label">Table II.</span> Summary of PRISMA Lab contributions in the field of aerial robotics.</p> </div> <span> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png?pub-status=live" class="aop-lazy-load-image" width="674" height="430" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png" data-zoomable="true"></div> </span> </div> <p class="p"> Nowadays, the goal is the development of a new generation of flying service robots capable of supporting human beings in all those activities requiring the ability to interact actively and safely in the air. Challenging fields include inspecting buildings and large infrastructures, sample picking, and remote aerial manipulation. The latter is intended as the grasping, transporting, positioning, assembly and disassembly of mechanical parts, measurement instruments, and any objects performed with aerial vehicles. Indeed, UAVs are currently migrating from passive tasks like inspection, surveillance, monitoring, remote sensing, and so on, to active tasks like grasping and manipulation. UAVs must have the proper tools to accomplish manipulation tasks in the air. 
The two most adopted solutions are either to mount a gripper or a multi-fingered hand directly on the aerial vehicle, for example, a flying hand, or to equip the UAV with one or more robotic arms, for example, an unmanned aerial manipulator (UAM) as shown in Fig. <a class="xref fig" href="#f4">4</a>. The UAM could be an efficient solution providing an aerial vehicle capable of performing dexterous manipulation tasks. Surveys regarding aerial manipulation can be found in refs. [<a class="xref bibr" href="#ref57"><span class="show-for-sr">Reference Oller, Tognon, Suarez, Lee and Franchi</span>57</a>, <a class="xref bibr" href="#ref58"><span class="show-for-sr">Reference Ruggiero, Lippiello and Ollero</span>58</a>].</p> <p class="p"> In the following sections, an overview of the work carried out in aerial vehicle control and aerial manipulation is revised. Table <a class="xref table" href="#tblII">II</a> provides a summary of the recent contributions related to these aspects. </p><section><div class="fig" data-magellan-destination="f4" id="f4"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig4.png?pub-status=live" class="aop-lazy-load-image" width="4250" height="1126" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig4.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 4.</span> Two unmanned aerial manipulators during non-destructive test measurements. On the left, an aerial vehicle equipped with one arm is measuring the thickness of a wall with an ultrasonic probe. 
On the right, a hybrid drone equipped with a specially developed omnidirectional mobile base that can land on pipelines and then move to position ad-hoc measurement systems for non-destructive test measures.</p> </div></div></section> <div class="sec" data-magellan-destination="s3-1" id="s3-1"> <h3 class="B"><span class="label">3.1.</span> Control of aerial vehicles</h3> <p class="p"> Model-based control of VToL UAVs leverages many simplifications by neglecting several aerodynamic effects whose presence affects the performance of tracking and regulation control problems. Therefore, researchers always seek robustification techniques to improve related problems.</p> <p class="p"> An estimator of unmodeled dynamics and external wrench acting on the VToL UAV and based on the system’s momentum was employed in [<a class="xref bibr" href="#ref59"><span class="show-for-sr">Reference Ruggiero, Cacace, Sadeghian and Lippiello</span>59</a>] to compensate for such disturbances. This estimator can be inserted in standard hierarchical controllers commanding UAVs with a flat propeller configuration. Another estimator, based on a robust extended-state observer, was designed in [<a class="xref bibr" href="#ref60"><span class="show-for-sr">Reference Sotos, Cacace, Ruggiero and Lippiello</span>60</a>]. In this case, a UAV with passively tilted propellers was considered. In the case of a UAV with actively tilted propellers, instead, a robust controller is devised in [<a class="xref bibr" href="#ref61"><span class="show-for-sr">Reference Sotos, Ruggiero and Lippiello</span>61</a>]. The proposed technique is model-free and based on a hyperbolic controller globally attracting the error signals to an ultimate bound about the origin despite external disturbances.</p> <p class="p"> In the case of a quadrotor, the loss or damage of one propeller can be dramatic for the aerial vehicle’s stable flight. The techniques developed in refs. 
[<a class="xref bibr" href="#ref62"><span class="show-for-sr">Reference Lippiello, Ruggiero and Serra</span>62</a>, <a class="xref bibr" href="#ref63"><span class="show-for-sr">Reference Lippiello, Ruggiero and Serra</span>63</a>] can be employed to perform an emergency landing. While both are supposed to turn off the propeller as opposed to the damaged one, resulting in a bi-rotor configuration in which the yaw is uncontrolled, the former considers a PID approach, while the latter a backstepping approach to track the emergency landing trajectory in the Cartesian space.</p> </div> <div class="sec" data-magellan-destination="s3-2" id="s3-2"> <h3 class="B"><span class="label">3.2.</span> Aerial manipulation</h3> <p class="p"> Four elements mainly constitute a UAM: <span data-mathjax-status="alt-graphic" class="inline-formula"> <span class="alternatives"> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline1.png?pub-status=live" class="aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off" width="10" height="16" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline1.png" data-zoomable="false"> <span class="mathjax-tex-wrapper" data-mathjax-type="texmath"><span class="tex-math mathjax-tex-math mathjax-on"> $i)$ </span></span> </span> </span> the UAV floating base; <span data-mathjax-status="alt-graphic" class="inline-formula"> <span class="alternatives"> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline2.png?pub-status=live" class="aop-lazy-load-image mathjax-alternative mathjax-alt-graphic 
mathjax-off" width="15" height="16" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline2.png" data-zoomable="false"> <span class="mathjax-tex-wrapper" data-mathjax-type="texmath"><span class="tex-math mathjax-tex-math mathjax-on"> $ii)$ </span></span> </span> </span> the robotic arm(s); <span data-mathjax-status="alt-graphic" class="inline-formula"> <span class="alternatives"> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline3.png?pub-status=live" class="aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off" width="20" height="16" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline3.png" data-zoomable="false"> <span class="mathjax-tex-wrapper" data-mathjax-type="texmath"><span class="tex-math mathjax-tex-math mathjax-on"> $iii)$ </span></span> </span> </span> the gripper(s) or multi-fingered hand(s) attached at the end-effector of the arm(s); iv) the necessary sensory system. During the flight, the mounted robot arm provides even more issues since its dynamics depend on the actual configuration state of the whole system. There are two approaches to addressing planning and control problems for a UAM. The former is a “centralized” approach in which the UAV and the robotic arm are considered a unique entity. Thus the planning and the controller are designed from the complete kinematic and dynamic models. The latter approach considers the UAV and the robotic arm as separate independent systems. 
The effects of the arm on the aerial vehicle can be then considered external disturbances and vice versa [<a class="xref bibr" href="#ref64"><span class="show-for-sr">Reference D’Ago, Selvaggio, Suarez, Gañán, Buonocore, Di Castro, Lippiello, Ollero and Ruggiero</span>64</a>, <a class="xref bibr" href="#ref65"><span class="show-for-sr">Reference Ruggiero, Trujillo, Cano, Ascorbe, Viguria, Peréz, Lippiello, Ollero and Siciliano</span>65</a>].</p> <p class="p"> Aerial manipulation is now almost a reality in inspection and maintenance applications, particularly non-destructive test (NDT) measurements (see Fig. <a class="xref fig" href="#f4">4</a>). In this scenario, ultrasonic probes are used to retrieve the wall thickness of a surface to prove the integrity of the material without compromising its internal structure. These tests are performed by placing the inspection probe in fixed contact with the surface under examination. Currently, NDT measurements are performed by humans who must climb a high scaffolding to reach the inspection location with the use of tools like man-lifts, cranes, or rope-access systems. Therefore, improving NDT inspection operations is fundamental to raising human safety and decreasing the economic costs of inspection procedures. The platforms presented in refs. [<a class="xref bibr" href="#ref66"><span class="show-for-sr">Reference Cacace, Fontanelli and Lippiello</span>66</a>, <a class="xref bibr" href="#ref67"><span class="show-for-sr">Reference Cacace, Silva, Fontanelli and Lippiello</span>67</a>] are possible solutions to address NDT measurements in challenging plants. There, a robotic arm was used for pipe inspection. Besides this, UAMs can interact with humans and help them in daily activities, becoming efficient aerial coworkers, particularly for working at height in inspection and maintenance activities that still require human intervention. 
Therefore, as long as the application range of drones increases, the possibility of sharing the human workspace also increases. Hence, it becomes paramount to understand how the interaction between humans and drones is established. The work in [<a class="xref bibr" href="#ref68"><span class="show-for-sr">Reference Cuniato, Cacace, Selvaggio, Ruggiero and Lippiello</span>68</a>] went in this direction thanks to implementing a hardware-in-the-loop simulator for human cooperation with an aerial manipulator. The simulator provided the user with realistic haptic feedback for a human-aerial manipulator interaction activity. The forces exchanged between the hardware interface and the human/environment were measured and supplied to a dynamically simulated aerial manipulator. In turn, the simulated aerial platform fed back its position to the hardware allowing the human to feel and evaluate the interaction effects. Besides human-aerial manipulator cooperation, the simulator contributed to developing and testing autonomous control strategies in aerial manipulation.</p> <p class="p"> Autonomous aerial manipulation tasks can be accomplished also thanks to the use of exteroceptive sensing for an image-based visual impedance control that allows realizing physical interaction of a dual-arm UAM equipped with a camera and a force/torque sensor [<a class="xref bibr" href="#ref69"><span class="show-for-sr">Reference Lippiello, Fontanelli and Ruggiero</span>69</a>]. The design of a hierarchical task-composition framework for controlling a UAM, which integrates the main benefits of both image-based and position-based control schemes into a unified hybrid-control framework, was presented in [<a class="xref bibr" href="#ref25"><span class="show-for-sr">Reference Lippiello, Cacace, Santamaria-Navarro, Andrade-Cetto, Trujillo, Esteves and Viguria</span>25</a>]. 
Aerial manipulation tasks enabled by the proposed methods include the autonomous installation of clip bird diverters on high-voltage lines through a drone equipped with a sensorized stick to realize a compliant interaction with the environment [<a class="xref bibr" href="#ref70"><span class="show-for-sr">Reference D’Angelo, Pagano, Ruggiero and Lippiello</span>70</a>]. Besides enabling safer human operations, such applications realize the huge impact of reducing collisions with wires by <span data-mathjax-status="alt-graphic" class="inline-formula"> <span class="alternatives"> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline4.png?pub-status=live" class="aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off" width="16" height="13" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline4.png" data-zoomable="false"> <span class="mathjax-tex-wrapper" data-mathjax-type="texmath"><span class="tex-math mathjax-tex-math mathjax-on"> $50$ </span></span> </span> </span> to <span data-mathjax-status="alt-graphic" class="inline-formula"> <span class="alternatives"> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline5.png?pub-status=live" class="aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off" width="29" height="13" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline5.png" data-zoomable="false"> <span class="mathjax-tex-wrapper" data-mathjax-type="texmath"><span class="tex-math mathjax-tex-math 
mathjax-on"> $90\%$ </span></span> </span> </span>, saving tens of thousands of birds’ lives during their migrations.</p><div class="table-wrap" data-magellan-destination="tblIII" id="tblIII"> <div class="caption"> <p class="p"><span class="label">Table III.</span> Summary of PRISMA Lab contributions in the field of physical human-robot interaction.</p> </div> <span> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png?pub-status=live" class="aop-lazy-load-image" width="673" height="388" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png" data-zoomable="true"></div> </span> </div> </div> </div> <div class="sec other" data-magellan-destination="s4" id="s4"> <h2 class="A"><span class="label">4.</span> Physical human-robot interaction</h2> <p class="p"> By performing physical actions, robots can help humans in their jobs and daily lives [<a class="xref bibr" href="#ref71"><span class="show-for-sr">Reference Selvaggio, Cognetti, Nikolaidis, Ivaldi and Siciliano</span>71</a>]. This is useful in several applications ranging from physical assistance to disabled or elderly people to reduction of risks and fatigue at work. However, an intuitive, safe, and reliable interaction must be established for the robot to become an ideal proximal or remote assistant/collaborator. In the following sections, we are going to review recent work in this direction. 
Table <a class="xref table" href="#tblIII">III</a> provides a summary of the recent contributions in this field.</p> <div class="sec" data-magellan-destination="s4-1" id="s4-1"> <h3 class="B"><span class="label">4.1.</span> Proximal collaborative execution of structured tasks</h3> <p class="p"> While collaborative robotic platforms ensuring safe and compliant physical HRI are spreading in service robotics applications, the collaborative execution of structured collaborative tasks still poses relevant research challenges [<a class="xref bibr" href="#ref72"><span class="show-for-sr">Reference Johannsmeier and Haddadin</span>72</a>]. An effective and fluent human-robot collaboration during the execution of structured activities should support both cognitive and physical interaction. In these settings, operators and robots continuously estimate their reciprocal intentions to decide whether to commit to shared activities, when to switch towards a different task, or how to regulate compliant interactions during co-manipulation operations. In refs. [<a class="xref bibr" href="#ref73"><span class="show-for-sr">Reference Cacace, Caccavale, Finzi and Grieco</span>73</a>, <a class="xref bibr" href="#ref74"><span class="show-for-sr">Reference Cacace, Caccavale, Finzi and Lippiello</span>74</a>], we addressed these issues by proposing a human-robot collaborative framework which seamlessly integrates task monitoring, task orchestration, and task-situated interpretation of the human physical guidance (see Fig. <a class="xref fig" href="#f5">5</a> (e)) during the joint execution of hierarchically structured manipulation activities. In this setting, task orchestration and adaptation occur simultaneously with the interpretation of the human interventions. Depending on the assigned tasks, the supervisory framework enables potential subtasks, targets, and trajectories, while the human guidance is monitored by LSTM networks that classify the physical interventions of the operator. 
When the human guidance is assessed as aligned with the planned activities, the robotic system can keep executing the current activities, while suitably adjusting subtasks, targets, or motion trajectories following the corrections provided by the operator. Within this collaborative framework, different modalities of human-robot collaboration (human-guided, task-guided, balanced) were explored and assessed in terms of their effectiveness and user experience during the interaction.</p><section><div class="fig" data-magellan-destination="f5" id="f5"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig5.png?pub-status=live" class="aop-lazy-load-image" width="4255" height="1611" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig5.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 5.</span> (a) cognitive control framework compatible with AI methods for planning, reasoning, and learning; (b) task orchestration and situated interpretation of ambiguous human gestures; (c) kinesthetic teaching of structured tasks; combined task and motion plans (d); human-robot collaboration during the execution of a shared task (e).</p> </div></div></section> </div> <div class="sec" data-magellan-destination="s4-2" id="s4-2"> <h3 class="B"><span class="label">4.2.</span> Remote collaboration via shared control</h3> <p class="p"> Physical interactions between humans and robots are exploited to perform common or independent tasks. When the two parts work together to achieve a common goal, the robotic system may integrate some degree of autonomy aimed to help the human in executing the task, ensuring better performance, safety, and ergonomics. 
We refer to these as shared control or shared autonomy scenarios, with the latter considered as the case in which the autonomy level is possibly varying [<a class="xref bibr" href="#ref71"><span class="show-for-sr">Reference Selvaggio, Cognetti, Nikolaidis, Ivaldi and Siciliano</span>71</a>]. Broadly speaking there is the spectrum of possible interactions between humans and robots, from robots having full autonomy to none at all [<a class="xref bibr" href="#ref75"><span class="show-for-sr">Reference Goodrich and Schultz</span>75</a>]. As full autonomy still poses a problem for robotic systems when dealing with unknown or complex tasks in unstructured and uncertain scenarios [<a class="xref bibr" href="#ref76"><span class="show-for-sr">Reference Yang, Cambias, Cleary, Daimler, Drake, Dupont, Hata, Kazanzides, Martel, Patel, Santos and Taylor</span>76</a>], shared control comes useful to improve the task performance while not increasing the human operator workload [<a class="xref bibr" href="#ref77"><span class="show-for-sr">Reference Kanda and Ishiguro</span>77</a>]. Research about shared control focuses on the extent of human intervention in the control of artificial systems, splitting the workload between the two [<a class="xref bibr" href="#ref78"><span class="show-for-sr">Reference Schilling, Burgard, Muelling, Wrede and Ritter</span>78</a>]. 
The extent of human intervention, and thus robot autonomy, has been usually classified into discrete levels [<a class="xref bibr" href="#ref79"><span class="show-for-sr">Reference Bruemmer, Dudenhoeffer and Marble</span>79</a>–<a class="xref bibr" href="#ref81"><span class="show-for-sr">Reference Kortenkamp, Keirn-Schreckenghost and Bonasso</span>81</a>], with fewer studies considering a continuous domain [<a class="xref bibr" href="#ref82"><span class="show-for-sr">Reference Anderson, Peters, Iagnemma and Overholt</span>82</a>, <a class="xref bibr" href="#ref83"><span class="show-for-sr">Reference Desai and Yanco</span>83</a>]. Commonly, shared control techniques aim to fully or partially replace a function, such as identifying objects in cluttered environments [<a class="xref bibr" href="#ref84"><span class="show-for-sr">Reference Pitzer, Styer, Bersch, DuHadway and Becker</span>84</a>], while others start from a fully autonomous robot and give control to the user only in difficult situations [<a class="xref bibr" href="#ref80"><span class="show-for-sr">Reference Dias, Kannan, Browning, Jones, Argall, Dias, Zinck, Veloso and Stentz</span>80</a>, <a class="xref bibr" href="#ref81"><span class="show-for-sr">Reference Kortenkamp, Keirn-Schreckenghost and Bonasso</span>81</a>, <a class="xref bibr" href="#ref85"><span class="show-for-sr">Reference Sellner, Simmons and Singh</span>85</a>]. 
Some studies assist the operator by predicting their intent while selecting among different targets [<a class="xref bibr" href="#ref86"><span class="show-for-sr">Reference Dragan and Srinivasa</span>86</a>, <a class="xref bibr" href="#ref87"><span class="show-for-sr">Reference Javdani, Srinivasa and Bagnell</span>87</a>], while others exploit haptic feedback/guidance techniques while moving toward a specific target [<a class="xref bibr" href="#ref88"><span class="show-for-sr">Reference Aarno, Ekvall and Kragic</span>88</a>, <a class="xref bibr" href="#ref89"><span class="show-for-sr">Reference Crandall and Goodrich</span>89</a>].</p><section><div class="fig" data-magellan-destination="f6" id="f6"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig6.png?pub-status=live" class="aop-lazy-load-image" width="2881" height="1260" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig6.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 6.</span> A shared control telerobotic system consists of a local device used to jointly send partial commands and receive computed haptic information as feedback from the remote side. The user usually observes the remote environment by means of a camera that provides a limited awareness of the scene. In (a), the robot must execute a remote object grasping task [<a class="xref bibr" href="#ref91"><span class="show-for-sr">Reference Selvaggio, Giordano, Ficuciello and Siciliano</span>91</a>]. In this case, provided haptic information aims to increase the situational awareness of the operator informing about the proximity to the robot’s joint limits and singularities. 
In (b) and (c), vision-based or programmed virtual fixtures aid the execution of the task in industrial and surgical robotic settings, respectively [<a class="xref bibr" href="#ref92"><span class="show-for-sr">Reference Selvaggio, Fontanelli, Ficuciello, Villani and Siciliano</span>92</a>, <a class="xref bibr" href="#ref93"><span class="show-for-sr">Reference Selvaggio, Notomista, Chen, Gao, Trapani and Caldwell</span>93</a>]. In (d), a non-prehensile object transportation scenario is considered and haptic feedback is provided about the proximity to the sliding conditions of the object placed on the tray [<a class="xref bibr" href="#ref19"><span class="show-for-sr">Reference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano</span>19</a>].</p> </div></div></section> <p class="p"> Shared control/autonomy may take several forms and make use of a wide spectrum of methodologies depending on the application scenario. For example, when a human has to perform a complex manipulation task in a remote area by means of a dual-arm system, shared control methods may be designed to reduce the number of degrees of freedom controlled by the user while ensuring the task’s feasibility [<a class="xref bibr" href="#ref90"><span class="show-for-sr">Reference Selvaggio, Abi-Farraj, Pacchierotti, Giordano and Siciliano</span>90</a>]. In this way, the task execution becomes inherently less demanding both physically and cognitively. With the same aim, the autonomy and the human may be in charge of tasks having different priorities. In these cases, the tasks are usually organized hierarchically in a stack. Also in this case, controlling only one task, involving a minimum number of degrees of freedom, the human control of the robotic system becomes less fatigued [<a class="xref bibr" href="#ref91"><span class="show-for-sr">Reference Selvaggio, Giordano, Ficuciello and Siciliano</span>91</a>]. 
In remote applications, the user’s perception and awareness of the environment are usually hindered by the limited field of view provided by the remotely installed vision sensors (see Fig. <a class="xref fig" href="#f6">6</a> (a)). For this reason, it is beneficial to exploit additional communication channels (besides the visual one) to convey information about the state of the remote system/environment.</p> <p class="p"> Haptic guidance is usually employed in this case to increase the awareness of the robotic system state by displaying computed forces through a haptic device, which is also used to send commands to the robotic system. Haptic guidance may inform the user about the proximity to the system’s constraints (e.g., joint limits, singularities, collisions, etc.), suggesting motion directions that are free from constraints and safe for the task execution [<a class="xref bibr" href="#ref90"><span class="show-for-sr">Reference Selvaggio, Abi-Farraj, Pacchierotti, Giordano and Siciliano</span>90</a>, <a class="xref bibr" href="#ref91"><span class="show-for-sr">Reference Selvaggio, Giordano, Ficuciello and Siciliano</span>91</a>]. This may also be used to direct the user towards grasping poses that avoid constraints during post-grasping task trajectories [<a class="xref bibr" href="#ref94"><span class="show-for-sr">Reference Selvaggio, A.Ghalamzan, Moccia, Ficuciello and Siciliano</span>94</a>]. In addition to this, haptic guidance in the form of virtual fixtures may be employed when the application requires following paths with high precision, such as in hazardous industrial scenarios [<a class="xref bibr" href="#ref93"><span class="show-for-sr">Reference Selvaggio, Notomista, Chen, Gao, Trapani and Caldwell</span>93</a>] (see Fig. <a class="xref fig" href="#f6">6</a> (b)) or in surgical dissection scenarios [<a class="xref bibr" href="#ref92"><span class="show-for-sr">Reference Selvaggio, Fontanelli, Ficuciello, Villani and Siciliano</span>92</a>] (see Fig. 
<a class="xref fig" href="#f6">6</a> (c)). More recently, we have developed shared control methods for a remote robotic system performing a dynamic non-prehensile object transportation task, where haptic guidance was used to inform the user about proximity to the sliding condition [<a class="xref bibr" href="#ref19"><span class="show-for-sr">Reference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano</span>19</a>] (see Fig. <a class="xref fig" href="#f6">6</a> (d)).</p><div class="table-wrap" data-magellan-destination="tblIV" id="tblIV"> <div class="caption"> <p class="p"><span class="label">Table IV.</span> Summary of PRISMA Lab contributions in the field of AI and cognitive robotics.</p> </div> <span> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png?pub-status=live" class="aop-lazy-load-image" width="676" height="516" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png" data-zoomable="true"></div> </span> </div> </div> </div> <div class="sec other" data-magellan-destination="s5" id="s5"> <h2 class="A"><span class="label">5.</span> AI and cognitive robotics</h2> <p class="p"> In order for a robot to autonomously or cooperatively perform complex tasks in the real world its control system should be endowed with cognitive capabilities enabling deliberation, execution, learning, and perception in dynamic, interactive, and unstructured environments [<a class="xref bibr" href="#ref95"><span class="show-for-sr">Reference Rodriguez-Guerra, Sorrosal, Cabanes and Calleja</span>95</a>, <a class="xref bibr" href="#ref96"><span class="show-for-sr">Reference Schultheis and Cooper</span>96</a>]. 
Cognitive robotics [<a class="xref bibr" href="#ref97"><span class="show-for-sr">Reference Beetz, Beßler, Haidu, Pomarlan, Bozcuoğlu and Bartels</span>97</a>, <a class="xref bibr" href="#ref98"><span class="show-for-sr">Reference Lemaignan, Warnier, Sisbot, Clodic and Alami</span>98</a>] is concerned with these issues proposing architectures and methods for seamlessly integrating sensorimotor, cognitive, and interaction abilities in autonomous/interactive robots. Exploring these topics involves various research areas across AI and robotics. Flexible orchestration, execution, and monitoring of structured tasks is a particularly relevant aspect of robotics [<a class="xref bibr" href="#ref99"><span class="show-for-sr">Reference Beßler, Porzel, Pomarlan, Beetz, Malaka and Bateman</span>99</a>, <a class="xref bibr" href="#ref100"><span class="show-for-sr">Reference de la Cruz, Piater and Saveriano</span>100</a>]. Current AI and robotics literature mostly relies on integrated planning and execution frameworks to address adaptive execution of complex activities [<a class="xref bibr" href="#ref101"><span class="show-for-sr">Reference Carbone, Finzi, Orlandini and Pirri</span>101</a>, <a class="xref bibr" href="#ref102"><span class="show-for-sr">Reference Karpas, Levine, Yu and Williams</span>102</a>]. On the other hand, cognitive control models and methods [<a class="xref bibr" href="#ref103"><span class="show-for-sr">Reference Botvinick, Braver, Barch, Carter and Cohen</span>103</a>–<a class="xref bibr" href="#ref105"><span class="show-for-sr">Reference Cooper and Shallice</span>105</a>] can be deployed to improve robot autonomy as well as HRI performance. In this direction, we are currently investigating these methods to develop a cognitive control framework suitable for human-robot collaboration. 
Another relevant issue we are concerned with is the combination of symbolic and sub-symbolic approaches to incremental task learning [<a class="xref bibr" href="#ref106"><span class="show-for-sr">Reference Petrík, Tapaswi, Laptev and Sivic</span>106</a>, <a class="xref bibr" href="#ref107"><span class="show-for-sr">Reference Ramirez-Amaro, Yang and Cheng</span>107</a>] and task and motion planning [<a class="xref bibr" href="#ref108"><span class="show-for-sr">Reference Mansouri, Pecora and Schüller</span>108</a>]. In Table <a class="xref table" href="#tblIV">IV</a>, we provide an overview of recent research activities related to these aspects. These works and results are further described and discussed in the following sections and categorized in Table <a class="xref table" href="#tblIV">IV</a>.</p> <div class="sec" data-magellan-destination="s5-1" id="s5-1"> <h3 class="B"><span class="label">5.1.</span> Flexible and collaborative execution of multiple tasks</h3> <p class="p"> An autonomous and collaborative robotic system is expected to flexibly execute multiple structured tasks while adeptly handling unexpected events and behaviors. In cognitive psychology and neuroscience, the executive mechanisms needed to support flexible, adaptive responses, and complex goal-directed cognitive processes are associated with the concept of cognitive control [<a class="xref bibr" href="#ref103"><span class="show-for-sr">Reference Botvinick, Braver, Barch, Carter and Cohen</span>103</a>]. Despite their relevance in cognitive science, cognitive control models have seldom been integrated into robotic systems. In this regard, we aim at combining classic AI and machine learning methods with cognitive control mechanisms to support flexible and situated adaptive orchestration of robotic activities as well as task planning and learning. 
In particular, we rely on a supervisory attentional system (SAS) [<a class="xref bibr" href="#ref105"><span class="show-for-sr">Reference Cooper and Shallice</span>105</a>, <a class="xref bibr" href="#ref122"><span class="show-for-sr">Reference Norman and Shallice</span>122</a>] to orchestrate the execution of hierarchically organized robotic behaviors. This paradigm seems particularly effective for both flexible plan execution and human-robot collaboration, in that it provides attention mechanisms considered as pivotal not only for task switching and regulation but also for human-human communication. Following this approach, we are currently developing a robotic cognitive control framework, based on the SAS paradigm, enabling multiple task orchestration execution, collaborative execution of structured tasks, and incremental task learning [<a class="xref bibr" href="#ref114"><span class="show-for-sr">Reference Caccavale and Finzi</span>114</a>]. In this direction, we proposed and developed a practical attention-based executive framework (see (a) in Fig. <a class="xref fig" href="#f5">5</a>), suitable for real-world collaborative robotic systems, which is also compatible with AI methods for planning, execution, learning, and HRI/communication. 
We show that the proposed framework supports flexible orchestration of multiple concurrent tasks hierarchically organized [<a class="xref bibr" href="#ref111"><span class="show-for-sr">Reference Caccavale and Finzi</span>111</a>, <a class="xref bibr" href="#ref112"><span class="show-for-sr">Reference Caccavale and Finzi</span>112</a>] and natural human-robot collaborative execution of structured activities [<a class="xref bibr" href="#ref114"><span class="show-for-sr">Reference Caccavale and Finzi</span>114</a>], in that it allows fast and adaptive responses to unexpected events while reducing replanning [<a class="xref bibr" href="#ref110"><span class="show-for-sr">Reference Caccavale, Cacace, Fiore, Alami and Finzi</span>110</a>] and supporting task-situated interpretation of the human interventions [<a class="xref bibr" href="#ref74"><span class="show-for-sr">Reference Cacace, Caccavale, Finzi and Lippiello</span>74</a>, <a class="xref bibr" href="#ref115"><span class="show-for-sr">Reference Caccavale, Leone, Lucignano, Rossi, Staffa and Finzi</span>115</a>] (e.g., human pointing gestures as in (b) Fig. <a class="xref fig" href="#f5">5</a>). 
Attentional mechanisms are also effective in improving users’ situation awareness and interpretation of robot behaviors by regulating or adjusting human-robot communication depending on the executive context [<a class="xref bibr" href="#ref109"><span class="show-for-sr">Reference Cacace, Caccavale, Finzi and Lippiello</span>109</a>] or to support explainability during human-robot collaboration [<a class="xref bibr" href="#ref113"><span class="show-for-sr">Reference Caccavale and Finzi</span>113</a>].</p> </div> <div class="sec" data-magellan-destination="s5-2" id="s5-2"> <h3 class="B"><span class="label">5.2.</span> Task learning and teaching</h3> <p class="p"> Attention-based task supervision and execution provide natural and effective support to task teaching and learning from demonstrations [<a class="xref bibr" href="#ref114"><span class="show-for-sr">Reference Caccavale and Finzi</span>114</a>]. In [<a class="xref bibr" href="#ref117"><span class="show-for-sr">Reference Caccavale, Saveriano, Finzi and Lee</span>117</a>], we proposed a framework enabling kinesthetic teaching of hierarchical tasks starting from abstract/incomplete descriptions: the human physical demonstration (as in (c) Fig. <a class="xref fig" href="#f5">5</a>) is segmented into low-level controllers while a supervisory attentional system associates the generated segments to the abstract task structure, providing it with concrete/executable primitives. In this context, attentional manipulation (object or verbal cueing) can be exploited by the human to facilitate the matching between (top-down) proposed tasks/subtasks and (bottom-up) generated segments/models. Such an approach was also extended to the imitation learning of dual-arm structured robotic tasks [<a class="xref bibr" href="#ref118"><span class="show-for-sr">Reference Caccavale, Saveriano, Fontanelli, Ficuciello, Lee and Finzi</span>118</a>]. Attentional top-down and bottom-up regulations can also be learned from the demonstration. 
In [<a class="xref bibr" href="#ref116"><span class="show-for-sr">Reference Caccavale and Finzi</span>116</a>], robotic task structures are associated with a multi-layered feed-forward neural network whose nodes/edges represent actions/relations to be executed in so combining neural-based learning and symbolic activities. Multi-robot task learning issues were also explored. In [<a class="xref bibr" href="#ref119"><span class="show-for-sr">Reference Caccavale, Ermini, Fedeli, Finzi, Lippiello and Tavano</span>119</a>], a reinforcement deep Q-learning approach was proposed to guide a group of sanitizing robots in cleaning railway stations with dynamic priorities. This approach was also extended to prioritized cleaning with heterogeneous teams of robots [<a class="xref bibr" href="#ref120"><span class="show-for-sr">Reference Caccavale, Ermini, Fedeli, Finzi, Lippiello and Tavano</span>120</a>].</p> </div> <div class="sec" data-magellan-destination="s5-3" id="s5-3"> <h3 class="B"><span class="label">5.3.</span> Combined task and motion planning</h3> <p class="p"> Task and motion planning in robotics are typically handled by separate methods, with high-level task planners generating abstract actions and motion planners specifying concrete motions. These two planning processes are, however, strictly interdependent, and various approaches have been proposed in the literature to efficiently generate combined plans [<a class="xref bibr" href="#ref108"><span class="show-for-sr">Reference Mansouri, Pecora and Schüller</span>108</a>]. Recently, we started to investigate how sampling-based methods such as Rapidly Exploring Random Trees (RRTs), commonly employed for motion planning, can be leveraged to generate task and motion plans within a metric space where both symbolic (task) and sub-symbolic (motion) spaces are represented [<a class="xref bibr" href="#ref121"><span class="show-for-sr">Reference Caccavale and Finzi</span>121</a>]. 
The notion of distance defined in this extended metric space is then exploited to guide the expansion of the RRT to generate plans including both symbolic actions and feasible movements in the configuration space (see (d) in Fig. <a class="xref fig" href="#f5">5</a>). Empirical results collected in mobile robotics case studies suggest that the approach is feasible in realistic scenarios, while its effectiveness is more emphasized in complex and cluttered environments.</p><div class="table-wrap" data-magellan-destination="tblV" id="tblV"> <div class="caption"> <p class="p"><span class="label">Table V.</span> Summary of PRISMA Lab contributions in the field of industrial robotics.</p> </div> <span> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png?pub-status=live" class="aop-lazy-load-image" width="677" height="269" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png" data-zoomable="true"></div> </span> </div> </div> </div> <div class="sec other" data-magellan-destination="s6" id="s6"> <h2 class="A"><span class="label">6.</span> Industrial robotics</h2> <p class="p"> In industry, logistics aims at optimizing the flow of goods inside the large-scale distribution. The task of unloading carton cases from a pallet, usually referred to as depalletizing, yields several technological challenges [<a class="xref bibr" href="#ref123"><span class="show-for-sr">Reference Echelmeyer, Kirchheim and Wellbrock</span>123</a>] due to the heterogeneous nature of the cases that can present different dimensions, shapes, weights, and textures. This is the case in supermarkets where the products are stored on mixed pallets, which are pallets made of heterogeneous cases. 
On the other side, the literature review is mainly focused on the easier task of depalletizing homogeneous pallets, which are pallets made of standardized and equal cases. For instance, AI-enabled depalletizing systems were proposed to address problems of motion planning [<a class="xref bibr" href="#ref124"><span class="show-for-sr">Reference Sakamoto, Harada and Wan</span>124</a>] and safety [<a class="xref bibr" href="#ref125"><span class="show-for-sr">Reference Jocas, Kurrek, Zoghlami, Gianni and Salehi</span>125</a>]. In [<a class="xref bibr" href="#ref126"><span class="show-for-sr">Reference Nakamoto, Eto, Sonoura, Tanaka and Ogawa</span>126</a>], the use of target plane extraction from depth images and package border detection via brightness images to recognize various packages stacked complicatedly was proposed. A similar perception system can be found also in [<a class="xref bibr" href="#ref127"><span class="show-for-sr">Reference Schwarz, Milan, Periyasamy and Behnke</span>127</a>], where a deep-learning approach that combines object detection and semantic segmentation was applied to pick bins in cluttered warehouse scenarios. In this case, a specific data-reduction method was deployed to reduce the dimension of the dataset but several images of objects are still needed, impairing its usage by non-expert operators. Moreover, in [<a class="xref bibr" href="#ref128"><span class="show-for-sr">Reference Katsoulas and Kosmopoulos</span>128</a>] a system comprising an industrial robot and time-of-flight laser sensors was used to perform the depalletizing task. 
Some examples of specific gripping solutions developed to address both depalletizing and palletizing tasks (the task of loading cases to assemble a pallet) in highly structured industrial environments include: the robotic manipulator proposed in [<a class="xref bibr" href="#ref129"><span class="show-for-sr">Reference Krug, Stoyanov, Tincani, Andreasson, Mosberger, Fantoni and Lilienthal</span>129</a>], the suction systems applied on an autonomous robot capable of picking standard boxes from the upper side and placing them on a conveyance line proposed in [<a class="xref bibr" href="#ref126"><span class="show-for-sr">Reference Nakamoto, Eto, Sonoura, Tanaka and Ogawa</span>126</a>, <a class="xref bibr" href="#ref130"><span class="show-for-sr">Reference Tanaka, Ogawa, Nakamoto, Sonoura and Eto</span>130</a>], as well as the flexible robotic palletizer presented in [<a class="xref bibr" href="#ref131"><span class="show-for-sr">Reference Moura and Silva</span>131</a>]. Table <a class="xref table" href="#tblV">V</a> provides an overview of the work done in this field.</p> <div class="sec" data-magellan-destination="s6-1" id="s6-1"> <h3 class="B"><span class="label">6.1.</span> Logistics</h3> <p class="p"> A common activity in logistics is to depalletize goods from shipping pallets. This task, which is hard and uncomfortable for human operators, is often performed by robotic depalletizing systems. These automated solutions are very effective in well-structured environments, however, there are more complex situations, such as depalletizing of mixed pallets in supermarkets, which still represent a challenge for robotic systems. In recent years, we studied the problem of depalletizing mixed and randomly organized pallets by proposing a robotic depalletizing system [<a class="xref bibr" href="#ref132"><span class="show-for-sr">Reference Caccavale, Arpenti, Paduano, Fontanelli, Lippiello, Villani and Siciliano</span>132</a>] integrating attentional mechanisms from Sec. 
<a class="xref sec" href="#s5">5</a> to flexibly schedule, monitor, and adapt the depalletizing process considering online perceptual information from non-invasive sensors as well as high-level constraints that can be provided by supervising users or management systems.</p> <p class="p"> Such flexible depalletizing processes also require strong perceptive capabilities. To this end, in [<a class="xref bibr" href="#ref133"><span class="show-for-sr">Reference Arpenti, Caccavale, Paduano, Fontanelli, Lippiello, Villani and Siciliano</span>133</a>] a single-camera system was proposed, where RGB-D data were used for the detection, recognition, and localization of heterogeneous cases, both textured and untextured, in a mixed pallet. Specifically, a priori information about the content of the pallet (the product barcode, the number of instances of a given product case in the pallet, the dimensions of the cases, and the images of the textured cases) was combined with data from the RGB-D camera, exploiting a pipeline of 2D and 3D model-based computer vision algorithms, as shown in Fig. <a class="xref fig" href="#f7">7</a>, left. The integration of such a system into logistic chains was simplified by the short dataset required, based only on the images of the cases in the current pallet, and on a single image from a single RGB-D sensor.</p> <p class="p"> In addition to cognitive and perceptual capabilities, depalletizing robotic systems also require a high degree of dexterity to effectively grasp mixed cases with complex shapes. 
In [<a class="xref bibr" href="#ref134"><span class="show-for-sr">Reference Fontanelli, Paduano, Caccavale, Arpenti, Lippiello, Villani and Siciliano</span>134</a>], we proposed a sensorized gripper, designed to be assembled on the end-tip of an industrial robotic arm, that allowed grasping of cases either from above or from the lateral sides and was capable to adapt online its shape to different sizes of products.</p><section><div class="fig" data-magellan-destination="f7" id="f7"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig7.png?pub-status=live" class="aop-lazy-load-image" width="2833" height="1203" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig7.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 7.</span> Overall picture of the logistic scenario including an abstract representation of vision-based recognition and localization algorithm (left), snapshot of the robotic depalletizing cell (right) with highlighted detail of the gripping tool (red window).</p> </div></div></section> </div> </div> <div class="sec other" data-magellan-destination="s7" id="s7"> <h2 class="A"><span class="label">7.</span> Medical robotics</h2> <p class="p"> Medical robotics is a fast-growing field that integrates the principles of robotics with healthcare to advance medical procedures and enhance patient outcomes. Its primary objective is to develop cutting-edge robotic systems, devices, and technologies that cater to a wide range of medical domains, including surgery, rehabilitation, diagnosis, and patient care. 
In the realm of medical robotics, surgical robotics stands out as a specialized field dedicated to the development and application of robotic systems in surgical procedures. In this context, prioritizing safety is crucial, especially in robotic systems categorized as critical, where it serves as a fundamental design focus. In the quest for heightened safety and decreased cognitive burden, the shared control paradigm has played a crucial role, notably with the integration of active constraints. This methodology has given rise to specialized applications like Virtual Fixtures (VFs), which have garnered increasing popularity in recent years [<a class="xref bibr" href="#ref135"><span class="show-for-sr">Reference Bowyer, Davies and Baena</span>135</a>]. VFs act as virtual overlays, delivering guidance and support to surgeons during procedures and offering a diverse array of functionalities. When integrated with haptic feedback or guidance, the use of VFs in surgical teleoperated robots frequently offers active assistance to the surgeon through force rendering at the master side. As an example, Li et al. introduced an online collision avoidance method for the real-time interactive control of a surgical robot in complex environments, like the sinus cavities [<a class="xref bibr" href="#ref136"><span class="show-for-sr">Reference Li, Ishii and Taylor</span>136</a>]. The push for autonomous tasks in surgery stems from a drive to enhance precision and efficiency while relieving surgeons of cognitive workload in minimally invasive procedures. The advancement of surgical robots frequently entails the creation of innovative control laws using constrained optimization techniques [<a class="xref bibr" href="#ref137"><span class="show-for-sr">Reference Marinho, Adorno, k. and Mitsuishi</span>137</a>]. 
Ensuring the safety of robots in dynamic environments, particularly in robotics, has been significantly aided by the emergence of the Control Barrier Functions (CBFs) framework, as highlighted in [<a class="xref bibr" href="#ref138"><span class="show-for-sr">Reference Ames, Coogan, Egerstedt, Notomista, Sreenath and Tabuada</span>138</a>]. Advances in surgical robotics research extend beyond software applications, encompassing the innovation of hardware devices designed to streamline surgeons’ tasks and elevate their performance capabilities. A motorized hand offers an ergonomic alternative, and researched sensor designs prioritize force sensation for advantages in robotic surgery, such as injury reduction and palpation empowerment [<a class="xref bibr" href="#ref139"><span class="show-for-sr">Reference Kim, Kim, Seok, So and Choi</span>139</a>, <a class="xref bibr" href="#ref140"><span class="show-for-sr">Reference Lee, Kim, Gulrez, Yoon, Hannaford and Choi</span>140</a>]. In addition to surgical applications, medical robotic research has also advanced the development of sophisticated devices for artificial limbs. Drawing inspiration from the human hand, robotic hands have incorporated compliance and sensors through various technological solutions to enhance robustness by absorbing external impact and improve capabilities in object grasping and manipulation [<a class="xref bibr" href="#ref141"><span class="show-for-sr">Reference Catalano, Grioli, Farnioli, Serio, Piazza and Bicchi</span>141</a>, <a class="xref bibr" href="#ref142"><span class="show-for-sr">Reference Piazza, Catalano, Godfrey, Rossi, Grioli, Bianchi, Zhao and Bicchi</span>142</a>]. Table <a class="xref table" href="#tblVI">VI</a> provides a classification of the recent contributions to the field. 
</p><div class="table-wrap" data-magellan-destination="tblVI" id="tblVI"> <div class="caption"> <p class="p"><span class="label">Table VI.</span> Summary of PRISMA Lab contributions in the field of medical robotics.</p> </div> <span> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png?pub-status=live" class="aop-lazy-load-image" width="678" height="533" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png" data-zoomable="true"></div> </span> </div> <div class="sec" data-magellan-destination="s7-1" id="s7-1"> <h3 class="B"><span class="label">7.1.</span> Surgical robotics</h3> <p class="p"> Surgical robotics transformed surgery, progressing from open to minimally invasive and robot-assisted procedures. While open surgery involves large incisions and minimally invasive surgery uses small incisions, robot-assisted surgery utilizes robotic systems to enhance patient outcomes by reducing trauma, recovery times, and risks. However, there are ongoing constraints in accuracy, speed, dexterity, flexibility, and specialized skills. Research and development efforts are dedicated to overcoming these limitations and expanding the applications of robotic systems. Safety in surgical procedures is paramount, and advanced control systems with active constraints like VFs enhance safety and reduce cognitive load. VFs provide virtual guidance and assistance to surgeons through simulated barriers (Forbidden Regions Virtual Fixtures – FRVFs) and attractive forces (Guidance Virtual Fixtures – GVFs), improving surgical outcomes. 
A novel approach was employed for the precise dissection of polyps in surgical procedures, ensuring accurate detection of the region of interest and high-precision cutting with safety margins [<a class="xref bibr" href="#ref143"><span class="show-for-sr">Reference Moccia, Selvaggio, Villani, Siciliano and Ficuciello</span>143</a>]. The method utilized a control approach based on GVFs to constrain the robot’s motion along the dissection path. VFs were created using computer vision techniques, extracting control points from surgical scene images and dynamically updating them to adapt to environmental changes. The effectiveness of the approach was validated through experiments on the da Vinci Research Kit (dVRK) robot, an open-source platform based on the famous da Vinci<sup class="sup">®</sup> Surgical System. In the context of enhancing the suturing process with the dVRK robot, a similar approach was introduced, leveraging vision-based tracking techniques for precise needle tracking [<a class="xref bibr" href="#ref94"><span class="show-for-sr">Reference Selvaggio, A.Ghalamzan, Moccia, Ficuciello and Siciliano</span>94</a>]. The system was applied in conjunction with the haptic VF control technique using dVRK, mitigating the risk of joint limits and singularities during suturing. The optimal grasp pose was utilized to calculate force cues that guided the user’s hand through the Master Tool Manipulator. The paper in [<a class="xref bibr" href="#ref144"><span class="show-for-sr">Reference Moccia, Iacono, Siciliano and Ficuciello</span>144</a>] presented an example of FRVF application in the form of a surgical tools collision avoidance method. FRVFs were utilized to prevent tool collisions by generating a repulsive force for the surgeon. A marker-less tool tracking method employing a deep neural network architecture for tool segmentation was adopted (see Fig. <a class="xref fig" href="#f8">8</a>). 
This work proposed the use of an Extended Kalman Filter for pose estimation to enhance the robustness of VF application on the tool by incorporating both vision and kinematics information. Software applications are also moving toward increasing autonomy in surgical robotics. For instance, the paper in [<a class="xref bibr" href="#ref148"><span class="show-for-sr">Reference Moccia and Ficuciello</span>148</a>] presented an autonomous endoscope control algorithm for the dVRK’s Endoscopic Camera Manipulator in surgical robotics. It employed Image-based Visual Servoing (IBVS) with additional constraints enforced by CBFs to ensure instrument visibility and prevent joint limit violations. Laparoscopic images were used, and deep learning was applied for semantic segmentation. The algorithm configured an IBVS controller and solved a convex optimization problem to satisfy the constraints. The solutions mentioned earlier were tested in a simulated environment using the CoppeliaSim software, with a particular focus on the presentation of the dVRK simulator [<a class="xref bibr" href="#ref149"><span class="show-for-sr">Reference Ferro, Brunori, Magistri, Saiella, Selvaggio and Fontanelli</span>149</a>, <a class="xref bibr" href="#ref150"><span class="show-for-sr">Reference Fontanelli, Selvaggio, Ferro, Ficuciello, Vendittelli and Siciliano</span>150</a>].</p> <p class="p"> Research advancements in surgical robotics encompass not only software applications but also the development of hardware devices that aim to facilitate surgeons’ jobs and enhance their performance. 
The MUSHA Hand II, a multifunctional surgical instrument with underactuated soft fingers ( [<a class="xref bibr" href="#ref151"><span class="show-for-sr">Reference Ghafoor, Dai and Duffy</span>151</a>]) and force sensors, was integrated into the da Vinci<sup class="sup">®</sup> robotic platform [<a class="xref bibr" href="#ref145"><span class="show-for-sr">Reference Liu, Selvaggio, Ferrentino, Moccia, Pirozzi, Bracale and Ficuciello</span>145</a>–<a class="xref bibr" href="#ref147"><span class="show-for-sr">Reference Selvaggio, Fontanelli, Marrazzo, Bracale, Irace, Breglio, Villani, Siciliano and Ficuciello</span>147</a>], shown in Fig. <a class="xref fig" href="#f8">8</a>. This innovative hand enhances the adaptability and functionality of the surgical system, addressing limitations in force sensing during robot-assisted surgery. Experimental validation was performed on the dVRK robotic testbed. The paper in refs. [<a class="xref bibr" href="#ref23"><span class="show-for-sr">Reference Fontanelli, Selvaggio, Buonocore, Ficuciello, Villani and Siciliano</span>23</a>, <a class="xref bibr" href="#ref152"><span class="show-for-sr">Reference Sallam, Fontanelli, Gallo, La Rocca, Di Spiezio Sardo, Longo and Ficuciello</span>152</a>] introduces a novel single-handed needle driver tool inspired by human hand-rolling abilities. It includes a working prototype and is tested with the dVRK surgical system. Robotic solutions are also created to solve specific surgical procedures, like prostate cancer biopsy. The paper in [<a class="xref bibr" href="#ref153"><span class="show-for-sr">Reference Coevoet, Adagolodjo, Lin, Duriez and Ficuciello</span>153</a>] presented a robotic solution for transrectal prostate biopsy, showcasing a soft-rigid robot manipulator with an integrated probe-needle assembly. The system included manual positioning of the probe and autonomous alignment of the needle, along with MRI-US fusion for improved visualization. 
Experimental validation was conducted using prostate phantoms.</p><section><div class="fig" data-magellan-destination="f8" id="f8"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig8.png?pub-status=live" class="aop-lazy-load-image" width="4251" height="1436" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig8.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 8.</span> Left: a marker-less method tracks surgical tools, establishing VF geometry resembling a cylinder with its central axis aligned with the instrument’s axis [<a class="xref bibr" href="#ref144"><span class="show-for-sr">Reference Moccia, Iacono, Siciliano and Ficuciello</span>144</a>]; right: the MUSHA Hand II surgical tool, integrated on the dVRK robot [<a class="xref bibr" href="#ref145"><span class="show-for-sr">Reference Liu, Selvaggio, Ferrentino, Moccia, Pirozzi, Bracale and Ficuciello</span>145</a>–<a class="xref bibr" href="#ref147"><span class="show-for-sr">Reference Selvaggio, Fontanelli, Marrazzo, Bracale, Irace, Breglio, Villani, Siciliano and Ficuciello</span>147</a>].</p> </div></div></section> </div> <div class="sec" data-magellan-destination="s7-2" id="s7-2"> <h3 class="B"><span class="label">7.2.</span> Robotic hands and prosthesis</h3> <p class="p"> Robotic artificial limbs have played a crucial role in aiding individuals with missing body parts to regain functionality in their daily life activities. The PRISMA Hand II, depicted in Fig. <a class="xref fig" href="#f9">9</a>, represented a mechanically robust anthropomorphic hand with high underactuation, utilizing three motors to drive 19 joints through elastic tendons. 
Its distinctive mechanical design facilitated adaptive grasping and in-hand manipulation, complemented by tactile/force sensors embedded in each fingertip. Based on optoelectronic technology, these sensors provided valuable tactile/force feedback during object manipulation, particularly for deformable objects. The paper in [<a class="xref bibr" href="#ref154"><span class="show-for-sr">Reference Canbay, Ferrentino, Liu, Moccia, Pirozzi, Siciliano and Ficuciello</span>154</a>] detailed the hand’s mechanical design and sensor technology, and proposed a calibration procedure for the tactile/force sensors. It included a comparison of various neural network architectures for sensor calibration, experimental tests to determine the optimal tactile sensing suite, and demonstrations of force regulation effectiveness using calibrated sensors. The paper also introduced a virtual simulator for users to undergo training sessions in controlling the prosthesis. Surface Electromyographic (sEMG) sensors captured muscle signals from the user, processed by a recognition algorithm to interpret the patient’s intentions [<a class="xref bibr" href="#ref155"><span class="show-for-sr">Reference Leccia, Sallam, Grazioso, Caporaso, Di Gironimo and Ficuciello</span>155</a>].</p><section><div class="fig" data-magellan-destination="f9" id="f9"> <div class="figure-thumb"><img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig9.png?pub-status=live" class="aop-lazy-load-image" width="4249" height="2159" data-original-image="/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig9.png" data-zoomable="true"></div> <div class="caption"><p class="p"> </p><p class="p"><span class="label">Figure 9.</span> The PRISMA Hand II and its capabilities. 
The grasping options are categorized into three sets: (a) lateral grasps, (b) pinches, and (c) power grasps [<a class="xref bibr" href="#ref154"><span class="show-for-sr">Reference Canbay, Ferrentino, Liu, Moccia, Pirozzi, Siciliano and Ficuciello</span>154</a>, <a class="xref bibr" href="#ref155"><span class="show-for-sr">Reference Leccia, Sallam, Grazioso, Caporaso, Di Gironimo and Ficuciello</span>155</a>].</p> </div></div></section> </div> </div> <div class="sec other" data-magellan-destination="s8" id="s8"> <h2 class="A"><span class="label">8.</span> Future Directions</h2> <div class="sec" data-magellan-destination="s8-1" id="s8-1"> <h3 class="B"><span class="label">8.1.</span> Dynamic manipulation and locomotion</h3> <p class="p"> Manipulation and locomotion represent two research areas that require explicit or implicit control of the interaction forces and the enforcement of the related frictional constraints. Mastering in-contact situations through accurate force regulation will allow legged or service robots of the future to perform several difficult tasks with unprecedented precision and robustness [<a class="xref bibr" href="#ref156"><span class="show-for-sr">Reference Gong, Sun, Nair, Bidwai, R., Grezmak, Sartoretti and Daltorio</span>156</a>]. These include dealing with time-varying or switching contacts with the environment and manipulating or locomoting on articulated, foldable, or even continuously deformable surfaces. In both fields, the synthesis of novel mechanisms is always a meaningful aspect [<a class="xref bibr" href="#ref157"><span class="show-for-sr">Reference Jia, Huang, Li, Wu, Cao and Guo</span>157</a>, <a class="xref bibr" href="#ref158"><span class="show-for-sr">Reference Jia, Huang, Wang and Li</span>158</a>]. Solving complex tasks requiring simultaneous locomotion and manipulation (commonly referred to as loco-manipulation) using, for example, quadruped robots equipped with an arm, is a very active topic of research. 
Future works should focus on optimizing the robustness of loco-manipulation trajectories against unknown external disturbances or on developing control techniques for safe interaction with humans [<a class="xref bibr" href="#ref159"><span class="show-for-sr">Reference Bellicoso, Krämer, Stäuble, Sako, Jenelten, Bjelonic and Hutter</span>159</a>, <a class="xref bibr" href="#ref160"><span class="show-for-sr">Reference Ferrolho, Ivan, Merkt, Havoutis and Vijayakumar</span>160</a>]. This will raise the need for improving proprioceptive and exteroceptive perception techniques to accurately retrieve the actual state of the robot and the environment in contact. The combined use of multiple vision, force and tactile sensors, and fusion techniques constitutes a promising approach in this direction [<a class="xref bibr" href="#ref161"><span class="show-for-sr">Reference Costanzo, Natale and Selvaggio</span>161</a>]. Another future research direction includes the development of improved policy representation and learning or planning frameworks to handle difficult tasks. In other words, finding mappings from the task requirements and sensor feedback to controller inputs for in-contact tasks is still carried out with difficulty. The development of an accurate yet fast physics engine to simulate in-contact tasks with constrained environments will favor this and allow for better policy transfer to handle difficult tasks that can be learned in simulation before being deployed to the real world.</p> </div> <div class="sec" data-magellan-destination="s8-2" id="s8-2"> <h3 class="B"><span class="label">8.2.</span> Aerial robotics</h3> <p class="p"> Energy saving, safety in the interactions with people and objects, accuracy, and reliable decisional autonomy pose significant limitations in aerial systems. Future challenges involve power consumption and short-lived batteries, while uncertified devices prompt safety restrictions. 
Several roadmaps emphasize the need for aerial devices to function in real-world scenarios, facing inclement weather and requiring proper certifications. Mechatronics is crucial for both UAMs. Despite progress, challenges persist in enhancing safety and energy efficiency. Integrating mechanical design and control is essential, with a lack of research on the optimal positioning of grasping tools for UAMs. Hybrid mechatronic solutions are potential avenues for improvement.</p> <p class="p"> Opportunities come from inspection and maintenance tasks for aerial manipulators, such as replacing human operators in remote locations, handling hazardous tasks, and increasing plant inspections. Achieving these goals requires addressing outlined issues and improving environmental performance. While aerial manipulation activities are primarily in academia, recent European-funded projects like AIRobots, ARCAS, SHERPA, EuRoC, Aeroworks, AEROARMS, AERO-TRAIN, and AERIAL-CORE aim to bridge the gap between academia and industry. The AEROARMS project received the European Commission Innovation Radar Prize, showcasing advancements. However, the technology migration remains a challenging journey.</p> </div> <div class="sec" data-magellan-destination="s8-3" id="s8-3"> <h3 class="B"><span class="label">8.3.</span> Physical human-robot interaction</h3> <p class="p"> In future works, the proposed HRI frameworks can be extended to integrate multiple interaction modalities other than physical. For instance, visual and audio feedback may provide additional information about the robot’s state to improve readability, safety, and reliability during the assisted modes. 
In addition, gesture-based and speech-based interaction modalities may complement physical interaction to enable a more natural human-robot communication, while enhancing the robustness of intention estimation.</p> </div> <div class="sec" data-magellan-destination="s8-4" id="s8-4"> <h3 class="B"><span class="label">8.4.</span> AI and cognitive robotics</h3> <p class="p"> In our ongoing research activities, we aim to develop an integrated robotic executive framework supporting long-term autonomy in complex operative scenarios. For this purpose, our goal is to investigate incremental task teaching and adaptation methods, progressing from primitive to complex robotic tasks. In this direction, symbolic and sub-symbolic learning methods can be integrated to simultaneously learn hierarchical tasks, sensorimotor processes, and attention regulations through human demonstrations and environmental interaction. In this setting, effective mechanisms are also needed to retrieve and reuse learned tasks depending on the operational and the environmental context. Concerning natural human-robot collaboration, we are currently investigating additional attention mechanisms (e.g., joint attention, active perception, affordances, etc.) that play a crucial role in supporting task teaching and adaptive execution. 
Regarding combined task and motion planning methods, our aim is to formulate more sophisticated metrics and to address hierarchically structured tasks of mobile manipulation.</p> </div> <div class="sec" data-magellan-destination="s8-5" id="s8-5"> <h3 class="B"><span class="label">8.5.</span> Industrial robotics</h3> <p class="p"> As a future research direction, the flexible and adaptive architecture for depalletizing tasks in supermarkets proposed in [<a class="xref bibr" href="#ref132"><span class="show-for-sr">Reference Caccavale, Arpenti, Paduano, Fontanellli, Lippiello, Villani and Siciliano</span>132</a>] will also be extended to palletizing tasks or other industrial scenarios, such as packaging [<a class="xref bibr" href="#ref162"><span class="show-for-sr">Reference Dai and Caldwell</span>162</a>]. Moreover, more complex environmental conditions along with more sophisticated task structures including safety constraints and fault detection/correction will be investigated. Regarding the vision side, the segmentation accuracy, as well as the depalletization speed of the algorithms deployed in the framework [<a class="xref bibr" href="#ref133"><span class="show-for-sr">Reference Arpenti, Caccavale, Paduano, Fontanelli, Lippiello, Villani and Siciliano</span>133</a>] will be exhaustively compared with the performance of convolutional neural networks and support vector machines. Besides, multiple images from different perspectives will be exploited in a multi-camera approach to better estimate the poses of the cases. 
Regarding the gripping tool [<a class="xref bibr" href="#ref134"><span class="show-for-sr">Reference Fontanelli, Paduano, Caccavale, Arpenti, Lippiello, Villani and Siciliano</span>134</a>], more compact suction systems will be developed to find the best tradeoff between dimensions, weight, and effectiveness for each type of product.</p> </div> <div class="sec" data-magellan-destination="s8-6" id="s8-6"> <h3 class="B"><span class="label">8.6.</span> Medical robotics</h3> <p class="p"> Charting the course for the future of medical robotics, especially in the surgical domain, entails a pivotal shift towards the incorporation of cutting-edge AI techniques. This evolution seeks to broaden the applicability of proposed methodologies to embrace realistic surgical scenarios, effectively navigating challenges posed by tissue deformation and occlusions. Rigorous studies on medical procedures will be conducted to precisely define safety standards, ensuring a meticulous approach to healthcare practices. As a conclusive step, collaborative validation with surgeons will serve as a tangible testament to the effectiveness of the proposed pipelines, affirming their real-world impact in enhancing surgical precision and safety. In the realm of advancing robotic surgical instruments and artificial limbs, future trajectories point towards expanding the capabilities of proposed devices to cater to more specific scenarios. This evolution involves a strategic integration of tailored characteristics, incorporating cutting-edge sensing technologies and intelligent control strategies. Having demonstrated the potential applications of these devices, the ongoing endeavor is to refine their design for optimal performance across an array of surgical tasks. 
The ultimate objective lies in seamlessly transferring these innovations from the realm of development to practical clinical applications, ushering in a new era of enhanced surgical precision and functional prosthetic applications.</p> </div> </div> <div class="sec conclusions" data-magellan-destination="s9" id="s9"> <h2 class="A"><span class="label">9.</span> Conclusion</h2> <p class="p"> In this article, we overviewed the main results achieved by the robotics research carried out at the PRISMA Lab of the University of Naples Federico II during the last decade. After a brief overview, the key contributions to the six research areas of dynamic manipulation and locomotion, aerial robotics, physical HRI, AI and cognitive robotics, industrial robotics, and medical robotics were briefly reported and discussed together with future research directions. We highlighted the main achievements in each of these areas, categorizing the adopted methodologies and the key contributions in the fields.</p> <p class="p"> Our dream and goal for the future is to make scientific and technological research advancements in all the considered areas more accessible to other people around the world who may be able to use it for their purposes or needs. From this, significant breakthroughs are expected in the future for the industry, health, education, economic, and social sectors.</p> </div> </div> <div class="back"> <div class="sec other" data-magellan-destination="s10" id="s10"> <h2 class="A"> Author contribution</h2> <p class="p"> Bruno Siciliano conceived the article. 
Mario Selvaggio, Rocco Moccia, Pierluigi Arpenti, Riccardo Caccavale, and Fabio Ruggiero wrote the manuscript under the supervision of the rest of the authors who reviewed and edited it.</p> </div> <div class="sec funding-statement" data-magellan-destination="s11" id="s11"> <h2 class="A"> Financial support</h2> <p class="p"> The research leading to these results has been partially supported by the following projects: COWBOT, grant 2020NH7EAZ_002, PRIN 2020; AI-DROW, grant 2022BYSBYX, PRIN 2022 PNRR, European Union – NextGenerationEU; Harmony, grant 101017008, European Union’s Horizon 2020; Inverse, grant 101136067, and euROBIN, grant 101070596, European Union’s Horizon Europe; BRIEF, IC IR0000036, National Recovery and Resilience Plan, Mission 4 Component 2 Investment 3.1 of Italian Ministry of University and Research funded by the European Union – NextGenerationEU.</p> <p class="p"> The views and opinions expressed are only those of the authors and do not necessarily reflect those of the funding agencies.</p> </div> <div class="sec coi-statement" data-magellan-destination="s12" id="s12"> <h2 class="A"> Competing interests</h2> <p class="p"> The authors declare no competing interests exist.</p> </div> <div class="sec other" data-magellan-destination="s13" id="s13"> <h2 class="A"> Ethical approval</h2> <p class="p"> None.</p> </div> </div> </div></div> <!----> <!----> <hr aria-hidden="true" class="list-divider separator default" data-v-7036083a> <div id="references-list" class="circle-list"><h2>References</h2> <div id="ref1" aria-flowto="reference-1-content reference-1-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 1 in the content" id="reference-1-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [1] </div> <div class="circle-list__item__grouped"><div 
id="reference-1-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Suomalainen</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Karayiannidis</span>, <span class="given-names">Y.</span></span> and <span class="string-name"><span class="surname">Kyrki</span>, <span class="given-names">V.</span></span>, “<span class="article-title">A survey of robot manipulation in contact</span>,” <span class="source">Robot Auton Syst</span> <span class="volume">156</span>, <span class="fpage">104224</span> (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A survey of robot manipulation in contact' href=https://dx.doi.org/10.1016/j.robot.2022.104224>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A survey of robot manipulation in contact' href=https://scholar.google.com/scholar_lookup?title=A+survey+of+robot+manipulation+in+contact&author=Suomalainen+M.&author=Karayiannidis+Y.&author=Kyrki+V.&publication+year=2022&journal=Robot+Auton+Syst&volume=156&doi=10.1016%2Fj.robot.2022.104224>Google Scholar</a></div></div></div><div id="ref2" aria-flowto="reference-2-content reference-2-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 2 in the content" id="reference-2-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [2] </div> <div class="circle-list__item__grouped"><div id="reference-2-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Yang</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Zhang</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Zeng</span>, <span 
class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Agrawal</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Sreenath</span>, <span class="given-names">K.</span></span>, “<span class="article-title">Dynamic Legged Manipulation of a Ball Through Multi-Contact Optimization</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2020</span>) pp. <span class="fpage">7513</span>–<span class="lpage">7520</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Dynamic Legged Manipulation of a Ball Through Multi-Contact Optimization' href=https://scholar.google.com/scholar_lookup?title=Dynamic+Legged+Manipulation+of+a+Ball+Through+Multi-Contact+Optimization&author=Yang+C.&author=Zhang+B.&author=Zeng+J.&author=Agrawal+A.&author=Sreenath+K.&publication+year=2020>Google Scholar</a></div></div></div><div id="ref3" aria-flowto="reference-3-content reference-3-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 3 in the content" id="reference-3-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [3] </div> <div class="circle-list__item__grouped"><div id="reference-3-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Serra</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Donaire</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Buonocore</span>, <span class="given-names">L. 
R.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Control of nonprehensile planar rolling manipulation: A passivity-based approach</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">35</span>(<span class="issue">2</span>), <span class="fpage">317</span>–<span class="lpage">329</span> (<span class="year">2019</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for Control of nonprehensile planar rolling manipulation: A passivity-based approach' href=https://dx.doi.org/10.1109/TRO.2018.2887356>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Control of nonprehensile planar rolling manipulation: A passivity-based approach' href=https://scholar.google.com/scholar_lookup?title=Control+of+nonprehensile+planar+rolling+manipulation%3A+A+passivity-based+approach&author=Serra+D.&author=Ruggiero+F.&author=Donaire+A.&author=Buonocore+L.+R.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2019&journal=IEEE+Trans+Robot&volume=35&doi=10.1109%2FTRO.2018.2887356&pages=317-329>Google Scholar</a></div></div></div><div id="ref4" aria-flowto="reference-4-content reference-4-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 4 in the content" id="reference-4-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [4] </div> <div class="circle-list__item__grouped"><div id="reference-4-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Chai</span>, <span class="given-names">C.-Y.</span></span>, <span class="string-name"><span 
class="surname">Peng</span>, <span class="given-names">W.-H.</span></span> and <span class="string-name"><span class="surname">Tsao</span>, <span class="given-names">S.-L.</span></span>, “<span class="article-title">Object rearrangement through planar pushing: A theoretical analysis and validation</span>,” <span class="source">IEEE T Robot</span> <span class="volume">38</span>(<span class="issue">5</span>), <span class="fpage">2703</span>–<span class="lpage">2719</span> (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Object rearrangement through planar pushing: A theoretical analysis and validation' href=https://dx.doi.org/10.1109/TRO.2022.3153785>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Object rearrangement through planar pushing: A theoretical analysis and validation' href=https://scholar.google.com/scholar_lookup?title=Object+rearrangement+through+planar+pushing%3A+A+theoretical+analysis+and+validation&author=Chai+C.-Y.&author=Peng+W.-H.&author=Tsao+S.-L.&publication+year=2022&journal=IEEE+T+Robot&volume=38&doi=10.1109%2FTRO.2022.3153785&pages=2703-2719>Google Scholar</a></div></div></div><div id="ref5" aria-flowto="reference-5-content reference-5-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 5 in the content" id="reference-5-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [5] </div> <div class="circle-list__item__grouped"><div id="reference-5-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Satici</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span 
class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Coordinate-Free Framework for Robotic Pizza Tossing and Catching</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation</em>, (<span class="year">2016</span>) pp. <span class="fpage">3932</span>–<span class="lpage">3939</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Coordinate-Free Framework for Robotic Pizza Tossing and Catching' href=https://scholar.google.com/scholar_lookup?title=Coordinate-Free+Framework+for+Robotic+Pizza+Tossing+and+Catching&author=Satici+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2016>Google Scholar</a></div></div></div><div id="ref6" aria-flowto="reference-6-content reference-6-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 6 in the content" id="reference-6-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [6] </div> <div class="circle-list__item__grouped"><div id="reference-6-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Nonprehensile dynamic manipulation: A survey</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">3</span>(<span class="issue">3</span>), <span class="fpage">1711</span>–<span class="lpage">1718</span> (<span
class="year">2018</span>b).<a class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile dynamic manipulation: A survey' href=https://dx.doi.org/10.1109/LRA.2018.2801939>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile dynamic manipulation: A survey' href=https://scholar.google.com/scholar_lookup?title=Nonprehensile+dynamic+manipulation%3A+A+survey&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2801939&pages=1711-1718>Google Scholar</a></div></div></div><div id="ref7" aria-flowto="reference-7-content reference-7-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 7 in the content" id="reference-7-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [7] </div> <div class="circle-list__item__grouped"><div id="reference-7-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Sardain</span>, <span class="given-names">P.</span></span> and <span class="string-name"><span class="surname">Bessonnet</span>, <span class="given-names">G.</span></span>, “<span class="article-title">Forces acting on a biped robot. center of pressure-zero moment point</span>,” <span class="source">IEEE Trans Syst, Man, Cyber - Part A: Syst Humans</span> <span class="volume">34</span>(<span class="issue">5</span>), <span class="fpage">630</span>–<span class="lpage">637</span> (<span class="year">2004</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Forces acting on a biped robot. 
center of pressure-zero moment point' href=https://dx.doi.org/10.1109/TSMCA.2004.832811>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Forces acting on a biped robot. center of pressure-zero moment point' href=https://scholar.google.com/scholar_lookup?title=Forces+acting+on+a+biped+robot.+center+of+pressure-zero+moment+point&author=Sardain+P.&author=Bessonnet+G.&publication+year=2004&journal=IEEE+Trans+Syst%2C+Man%2C+Cyber+-+Part+A%3A+Syst+Humans&volume=34&doi=10.1109%2FTSMCA.2004.832811&pages=630-637>Google Scholar</a></div></div></div><div id="ref8" aria-flowto="reference-8-content reference-8-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 8 in the content" id="reference-8-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [8] </div> <div class="circle-list__item__grouped"><div id="reference-8-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Farid</span>, <span class="given-names">Y.</span></span>, <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation</span>,” <span class="source">Annu Rev Control</span> <span class="volume">53</span>, <span class="fpage">51</span>–<span class="lpage">69</span> (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation' 
href=https://dx.doi.org/10.1016/j.arcontrol.2022.04.009>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation' href=https://scholar.google.com/scholar_lookup?title=Review+and+descriptive+investigation+of+the+connection+between+bipedal+locomotion+and+non-prehensile+manipulation&author=Farid+Y.&author=Siciliano+B.&author=Ruggiero+F.&publication+year=2022&journal=Annu+Rev+Control&volume=53&doi=10.1016%2Fj.arcontrol.2022.04.009&pages=51-69>Google Scholar</a></div></div></div><div id="ref9" aria-flowto="reference-9-content reference-9-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 9 in the content" id="reference-9-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [9] </div> <div class="circle-list__item__grouped"><div id="reference-9-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">McGeer</span>, <span class="given-names">T.</span></span>, “<span class="article-title">Passive dynamic walking</span>,” <span class="source">Int J Robot Res</span> <span class="volume">9</span>(<span class="issue">2</span>), <span class="fpage">62</span>–<span class="lpage">82</span> (<span class="year">1990</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passive dynamic walking' href=https://dx.doi.org/10.1177/027836499000900206>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passive dynamic walking' href=https://scholar.google.com/scholar_lookup?title=Passive+dynamic+walking&author=McGeer+T.&publication+year=1990&journal=Int+J+Robot+Res&volume=9&doi=10.1177%2F027836499000900206&pages=62-82>Google Scholar</a></div></div></div><div 
id="ref10" aria-flowto="reference-10-content reference-10-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 10 in the content" id="reference-10-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [10] </div> <div class="circle-list__item__grouped"><div id="reference-10-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Holm</span>, <span class="given-names">J.</span></span> and <span class="string-name"><span class="surname">Spong</span>, <span class="given-names">M.</span></span>, “<span class="article-title">Kinetic Energy Shaping for Gait Regulation of Underactuated Bipeds</span>,” In: <em class="italic">IEEE International conference on control applications</em>, <span class="publisher-loc">San Antonio, Texas, USA</span> (<span class="year">2008</span>) pp. 
<span class="fpage">1232</span>–<span class="lpage">1238</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Kinetic Energy Shaping for Gait Regulation of Underactuated Bipeds' href=https://scholar.google.com/scholar_lookup?title=Kinetic+Energy+Shaping+for+Gait+Regulation+of+Underactuated+Bipeds&author=Holm+J.&author=Spong+M.&publication+year=2008>Google Scholar</a></div></div></div><div id="ref11" aria-flowto="reference-11-content reference-11-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><!----></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [11] </div> <div class="circle-list__item__grouped"><div id="reference-11-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Spong</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Bullo</span>, <span class="given-names">F.</span></span>, “<span class="chapter-title">Controlled Symmetries and Passive Walking</span>,” In: <span class="source">Proceeding IFAC Triennal World Congress</span>, (<span class="publisher-loc">Barcelona, Spain</span>, <span class="year">2002</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Proceeding IFAC Triennal World Congress' href=https://scholar.google.com/scholar_lookup?title=Proceeding+IFAC+Triennal+World+Congress&author=Spong+M.&author=Bullo+F.&publication+year=2002>Google Scholar</a></div></div></div><div id="ref12" aria-flowto="reference-12-content reference-12-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 12 in the content" id="reference-12-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [12] </div> <div class="circle-list__item__grouped"><div 
id="reference-12-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Spong</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Holm</span>, <span class="given-names">J.</span></span> and <span class="string-name"><span class="surname">Lee</span>, <span class="given-names">D.</span></span>, “<span class="chapter-title">Passivity-Based Control of Bipedal Locomotion</span>,” In: <span class="source">IEEE Robotics & Automation Magazine</span>, vol. <span class="volume">12</span>, (<span class="year">2007</span>) pp. <span class="fpage">30</span>–<span class="lpage">40</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for IEEE Robotics & Automation Magazine' href=https://scholar.google.com/scholar_lookup?title=IEEE+Robotics+%26+Automation+Magazine&author=Spong+M.&author=Holm+J.&author=Lee+D.&publication+year=2007&pages=30-40>Google Scholar</a></div></div></div><div id="ref13" aria-flowto="reference-13-content reference-13-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 13 in the content" id="reference-13-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [13] </div> <div class="circle-list__item__grouped"><div id="reference-13-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Mao</span>, <span class="given-names">L.</span></span>, <span class="string-name"><span class="surname">Gao</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Tian</span>, <span class="given-names">Y.</span></span> and <span class="string-name"><span class="surname">Zhao</span>, <span class="given-names">Y.</span></span>, “<span class="article-title">Novel method for preventing 
shin-collisions in six-legged robots by utilising a robot–terrain interference model</span>,” <span class="source">Mech Mach Theory</span> <span class="volume">151</span>, <span class="fpage">103897</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Novel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model' href=https://dx.doi.org/10.1016/j.mechmachtheory.2020.103897>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Novel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model' href=https://scholar.google.com/scholar_lookup?title=Novel+method+for+preventing+shin-collisions+in+six-legged+robots+by+utilising+a+robot%E2%80%93terrain+interference+model&author=Mao+L.&author=Gao+F.&author=Tian+Y.&author=Zhao+Y.&publication+year=2020&journal=Mech+Mach+Theory&volume=151&doi=10.1016%2Fj.mechmachtheory.2020.103897>Google Scholar</a></div></div></div><div id="ref14" aria-flowto="reference-14-content reference-14-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 14 in the content" id="reference-14-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [14] </div> <div class="circle-list__item__grouped"><div id="reference-14-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Bledt</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Wensing</span>, <span class="given-names">P. 
M.</span></span>, <span class="string-name"><span class="surname">Ingersoll</span>, <span class="given-names">S.</span></span> and <span class="string-name"><span class="surname">Kim</span>, <span class="given-names">S.</span></span>, “<span class="article-title">Contact Model Fusion for Event-Based Locomotion in Unstructured Terrains</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation (ICRA) 2018</em>, (<span class="year">2018</span>) pp. <span class="fpage">4399</span>–<span class="lpage">4406</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Contact Model Fusion for Event-Based Locomotion in Unstructured Terrains' href=https://scholar.google.com/scholar_lookup?title=Contact+Model+Fusion+for+Event-Based+Locomotion+in+Unstructured+Terrains&author=Bledt+G.&author=Wensing+P.+M.&author=Ingersoll+S.&author=Kim+S.&publication+year=2018>Google Scholar</a></div></div></div><div id="ref15" aria-flowto="reference-15-content reference-15-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 15 in the content" id="reference-15-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [15] </div> <div class="circle-list__item__grouped"><div id="reference-15-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Fahmi</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Mastalli</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Focchi</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Semini</span>, <span class="given-names">C.</span></span>, “<span class="article-title">Passive whole-body control for quadruped robots: Experimental 
validation over challenging terrain</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">4</span>(<span class="issue">3</span>), <span class="fpage">2553</span>–<span class="lpage">2560</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passive whole-body control for quadruped robots: Experimental validation over challenging terrain' href=https://dx.doi.org/10.1109/LRA.2019.2908502>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passive whole-body control for quadruped robots: Experimental validation over challenging terrain' href=https://scholar.google.com/scholar_lookup?title=Passive+whole-body+control+for+quadruped+robots%3A+Experimental+validation+over+challenging+terrain&author=Fahmi+S.&author=Mastalli+C.&author=Focchi+M.&author=Semini+C.&publication+year=2019&journal=IEEE+Robot+Auto+Lett&volume=4&doi=10.1109%2FLRA.2019.2908502&pages=2553-2560>Google Scholar</a></div></div></div><div id="ref16" aria-flowto="reference-16-content reference-16-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 16 in the content" id="reference-16-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [16] </div> <div class="circle-list__item__grouped"><div id="reference-16-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Bertoncelli</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Sabattini</span>, <span class="given-names">L.</span></span>, “<span class="article-title">Linear Time-Varying mpc for Nonprehensile Object Manipulation with a Nonholonomic Mobile 
Robot</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation (ICRA)</em>, (<span class="year">2020</span>) pp. <span class="fpage">11032</span>–<span class="lpage">11038</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Linear Time-Varying mpc for Nonprehensile Object Manipulation with a Nonholonomic Mobile Robot' href=https://scholar.google.com/scholar_lookup?title=Linear+Time-Varying+mpc+for+Nonprehensile+Object+Manipulation+with+a+Nonholonomic+Mobile+Robot&author=Bertoncelli+F.&author=Ruggiero+F.&author=Sabattini+L.&publication+year=2020>Google Scholar</a></div></div></div><div id="ref17" aria-flowto="reference-17-content reference-17-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 17 in the content" id="reference-17-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [17] </div> <div class="circle-list__item__grouped"><div id="reference-17-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Bertoncelli</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Sabattini</span>, <span class="given-names">L.</span></span>, “<span class="article-title">Task-Oriented Contact Optimization for Pushing Manipulation with Mobile Robots</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2022</span>) pp. 
<span class="fpage">1639</span>–<span class="lpage">1646</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Task-Oriented Contact Optimization for Pushing Manipulation with Mobile Robots' href=https://scholar.google.com/scholar_lookup?title=Task-Oriented+Contact+Optimization+for+Pushing+Manipulation+with+Mobile+Robots&author=Bertoncelli+F.&author=Selvaggio+M.&author=Ruggiero+F.&author=Sabattini+L.&publication+year=2022>Google Scholar</a></div></div></div><div id="ref18" aria-flowto="reference-18-content reference-18-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 18 in the content" id="reference-18-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [18] </div> <div class="circle-list__item__grouped"><div id="reference-18-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Morlando</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Nonprehensile Object Transportation with a Legged Manipulator</span>,” In: <em class="italic">International Conference on Robotics and Automation (ICRA)</em>, (<span class="year">2022</span>) pp.
<span class="fpage">6628</span>–<span class="lpage">6634</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile Object Transportation with a Legged Manipulator' href=https://dx.doi.org/10.1109/ICRA46639.2022.9811810>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile Object Transportation with a Legged Manipulator' href=https://scholar.google.com/scholar_lookup?title=Nonprehensile+Object+Transportation+with+a+Legged+Manipulator&author=Morlando+V.&author=Selvaggio+M.&author=Ruggiero+F.&publication+year=2022>Google Scholar</a></div></div></div><div id="ref19" aria-flowto="reference-19-content reference-19-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 19 in the content" id="reference-19-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [19] </div> <div class="circle-list__item__grouped"><div id="reference-19-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Pacchierotti</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Giordano</span>, <span class="given-names">P. 
R.</span></span>, “<span class="article-title">A shared-control teleoperation architecture for nonprehensile object transportation</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">38</span>(<span class="issue">1</span>), <span class="fpage">569</span>–<span class="lpage">583</span> (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A shared-control teleoperation architecture for nonprehensile object transportation' href=https://dx.doi.org/10.1109/TRO.2021.3086773>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A shared-control teleoperation architecture for nonprehensile object transportation' href=https://scholar.google.com/scholar_lookup?title=A+shared-control+teleoperation+architecture+for+nonprehensile+object+transportation&author=Selvaggio+M.&author=Cacace+J.&author=Pacchierotti+C.&author=Ruggiero+F.&author=Giordano+P.+R.&publication+year=2022&journal=IEEE+Trans+Robot&volume=38&doi=10.1109%2FTRO.2021.3086773&pages=569-583>Google Scholar</a></div></div></div><div id="ref20" aria-flowto="reference-20-content reference-20-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 20 in the content" id="reference-20-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [20] </div> <div class="circle-list__item__grouped"><div id="reference-20-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Garg</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Oriolo</span>, 
<span class="given-names">G.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Non-prehensile object transportation via model predictive non-sliding manipulation control</span>,” <span class="source">IEEE Trans Contr Syst T</span> <span class="volume">31</span>(<span class="issue">5</span>), <span class="fpage">2231</span>–<span class="lpage">2244</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Non-prehensile object transportation via model predictive non-sliding manipulation control' href=https://dx.doi.org/10.1109/TCST.2023.3277224>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Non-prehensile object transportation via model predictive non-sliding manipulation control' href=https://scholar.google.com/scholar_lookup?title=Non-prehensile+object+transportation+via+model+predictive+non-sliding+manipulation+control&author=Selvaggio+M.&author=Garg+A.&author=Ruggiero+F.&author=Oriolo+G.&author=Siciliano+B.&publication+year=2023&journal=IEEE+Trans+Contr+Syst+T&volume=31&doi=10.1109%2FTCST.2023.3277224&pages=2231-2244>Google Scholar</a></div></div></div><div id="ref21" aria-flowto="reference-21-content reference-21-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 21 in the content" id="reference-21-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [21] </div> <div class="circle-list__item__grouped"><div id="reference-21-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Subburaman</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span 
class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">A non-prehensile object transportation framework with adaptive tilting based on quadratic programming</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">8</span>(<span class="issue">6</span>), <span class="fpage">3581</span>–<span class="lpage">3588</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A non-prehensile object transportation framework with adaptive tilting based on quadratic programming' href=https://dx.doi.org/10.1109/LRA.2023.3268594>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A non-prehensile object transportation framework with adaptive tilting based on quadratic programming' href=https://scholar.google.com/scholar_lookup?title=A+non-prehensile+object+transportation+framework+with+adaptive+tilting+based+on+quadratic+programming&author=Subburaman+R.&author=Selvaggio+M.&author=Ruggiero+F.&publication+year=2023&journal=IEEE+Robot+Auto+Lett&volume=8&doi=10.1109%2FLRA.2023.3268594&pages=3581-3588>Google Scholar</a></div></div></div><div id="ref22" aria-flowto="reference-22-content reference-22-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><!----></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [22] </div> <div class="circle-list__item__grouped"><div id="reference-22-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Donaire</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Buonocore</span>, <span class="given-names">L. 
R.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk</span>,” <span class="source">IEEE Trans Contr Syst Tech</span> <span class="volume">25</span>(<span class="issue">6</span>), <span class="fpage">2135</span>–<span class="lpage">2142</span> (<span class="year">2017</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk' href=https://dx.doi.org/10.1109/TCST.2016.2637719>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk' href=https://scholar.google.com/scholar_lookup?title=Passivity-based+control+for+a+rolling-balancing+system%3A+The+nonprehensile+disk-on-disk&author=Donaire+A.&author=Ruggiero+F.&author=Buonocore+L.+R.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2017&journal=IEEE+Trans+Contr+Syst+Tech&volume=25&doi=10.1109%2FTCST.2016.2637719&pages=2135-2142>Google Scholar</a></div></div></div><div id="ref23" aria-flowto="reference-23-content reference-23-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 23 in the content" id="reference-23-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [23] </div> <div class="circle-list__item__grouped"><div id="reference-23-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. 
A.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Buonocore</span>, <span class="given-names">L. R.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">A new laparoscopic tool with in-hand rolling capabilities for needle reorientation</span>,” <span class="source">IEEE Robot Autom Lett</span> <span class="volume">3</span>(<span class="issue">3</span>), <span class="fpage">2354</span>–<span class="lpage">2361</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A new laparoscopic tool with in-hand rolling capabilities for needle reorientation' href=https://dx.doi.org/10.1109/LRA.2018.2809443>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A new laparoscopic tool with in-hand rolling capabilities for needle reorientation' href=https://scholar.google.com/scholar_lookup?title=A+new+laparoscopic+tool+with+in-hand+rolling+capabilities+for+needle+reorientation&author=Fontanelli+G.+A.&author=Selvaggio+M.&author=Buonocore+L.+R.&author=Ficuciello+F.&author=Villani+L.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Lett&volume=3&doi=10.1109%2FLRA.2018.2809443&pages=2354-2361>Google Scholar</a></div></div></div><div id="ref24" aria-flowto="reference-24-content reference-24-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><!----></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [24] </div> <div class="circle-list__item__grouped"><div 
id="reference-24-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Gutiérrez-Giles</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop</span>,” <span class="source">IEEE Robot Autom Lett</span> <span class="volume">3</span>(<span class="issue">2</span>), <span class="fpage">1136</span>–<span class="lpage">1143</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop' href=https://dx.doi.org/10.1109/LRA.2018.2792403>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop' href=https://scholar.google.com/scholar_lookup?title=Nonprehensile+manipulation+of+an+underactuated+mechanical+system+with+second-order+nonholonomic+constraints%3A+The+robotic+hula-hoop&author=Guti%C3%A9rrez-Giles+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Lett&volume=3&doi=10.1109%2FLRA.2018.2792403&pages=1136-1143>Google Scholar</a></div></div></div><div id="ref25" aria-flowto="reference-25-content reference-25-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 25 in the 
content" id="reference-25-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [25] </div> <div class="circle-list__item__grouped"><div id="reference-25-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Santamaria-Navarro</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Andrade-Cetto</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Trujillo</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Esteves</span>, <span class="given-names">Y.</span></span> and <span class="string-name"><span class="surname">Viguria</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Hybrid visual servoing with hierarchical task composition for aerial manipulation</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">1</span>(<span class="issue">1</span>), <span class="fpage">259</span>–<span class="lpage">266</span> (<span class="year">2016</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for Hybrid visual servoing with hierarchical task composition for aerial manipulation' href=https://dx.doi.org/10.1109/LRA.2015.2510749>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Hybrid visual servoing with hierarchical task composition for aerial manipulation' 
href=https://scholar.google.com/scholar_lookup?title=Hybrid+visual+servoing+with+hierarchical+task+composition+for+aerial+manipulation&author=Lippiello+V.&author=Cacace+J.&author=Santamaria-Navarro+A.&author=Andrade-Cetto+J.&author=Trujillo+M.&author=Esteves+Y.&author=Viguria+A.&publication+year=2016&journal=IEEE+Robot+Auto+Lett&volume=1&doi=10.1109%2FLRA.2015.2510749&pages=259-266>Google Scholar</a></div></div></div><div id="ref26" aria-flowto="reference-26-content reference-26-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 26 in the content" id="reference-26-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [26] </div> <div class="circle-list__item__grouped"><div id="reference-26-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ryu</span>, <span class="given-names">J.-C.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lynch</span>, <span class="given-names">K. 
M.</span></span>, “<span class="article-title">Control of nonprehensile rolling manipulation: Balancing a disk on a disk</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">29</span>(<span class="issue">5</span>), <span class="fpage">1152</span>–<span class="lpage">1161</span> (<span class="year">2013</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Control of nonprehensile rolling manipulation: Balancing a disk on a disk' href=https://dx.doi.org/10.1109/TRO.2013.2262775>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Control of nonprehensile rolling manipulation: Balancing a disk on a disk' href=https://scholar.google.com/scholar_lookup?title=Control+of+nonprehensile+rolling+manipulation%3A+Balancing+a+disk+on+a+disk&author=Ryu+J.-C.&author=Ruggiero+F.&author=Lynch+K.+M.&publication+year=2013&journal=IEEE+Trans+Robot&volume=29&doi=10.1109%2FTRO.2013.2262775&pages=1152-1161>Google Scholar</a></div></div></div><div id="ref27" aria-flowto="reference-27-content reference-27-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 27 in the content" id="reference-27-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [27] </div> <div class="circle-list__item__grouped"><div id="reference-27-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Serra</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Ferguson</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Siniscalco</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span 
class="surname">Petit</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">On the Experiments about the Nonprehensile Reconfiguration of a Rolling Sphere on a Plate</span>,” In: <em class="italic">26th Mediterranean Conference on Control and Automation (MED)</em>, (<span class="year">2018</span>) pp. <span class="fpage">13</span>–<span class="lpage">20</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for On the Experiments about the Nonprehensile Reconfiguration of a Rolling Sphere on a Plate' href=https://scholar.google.com/scholar_lookup?title=On+the+Experiments+about+the+Nonprehensile+Reconfiguration+of+a+Rolling+Sphere+on+a+Plate&author=Serra+D.&author=Ferguson+J.&author=Ruggiero+F.&author=Siniscalco+A.&author=Petit+A.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018>Google Scholar</a></div></div></div><div id="ref28" aria-flowto="reference-28-content reference-28-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 28 in the content" id="reference-28-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [28] </div> <div class="circle-list__item__grouped"><div id="reference-28-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Gutiérrez-Giles</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span 
class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Closed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism</span>,” In: <em class="italic">European Control Conference</em>, (<span class="year">2019</span>) pp. <span class="fpage">1580</span>–<span class="lpage">1585</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Closed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism' href=https://dx.doi.org/10.23919/ECC.2019.8796077>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Closed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism' href=https://scholar.google.com/scholar_lookup?title=Closed-Loop+Control+of+a+Nonprehensile+Manipulation+System+Inspired+by+a+Pizza-Peel+Mechanism&author=Guti%C3%A9rrez-Giles+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref29" aria-flowto="reference-29-content reference-29-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 29 in the content" id="reference-29-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [29] </div> <div class="circle-list__item__grouped"><div id="reference-29-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Kim</span>, <span class="given-names">J.-T.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, 
<span class="given-names">B.</span></span>, “<span class="chapter-title">Planning Framework for Robotic Pizza Dough Stretching with a Rolling Pin</span>,” In: <span class="source">Robot Dynamic Manipulation. Perception of Deformable Objects and Nonprehensile Manipulation Control</span>, <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> (eds.), <span class="volume">144</span> (<span class="publisher-name">Springer</span>, <span class="publisher-loc">Cham</span>, <span class="year">2022</span>) pp. <span class="fpage">229</span>–<span class="lpage">253</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Robot Dynamic Manipulation. Perception of Deformable Objects and Nonprehensile Manipulation Control' href=https://dx.doi.org/10.1007/978-3-030-93290-9_9>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Robot Dynamic Manipulation. 
Perception of Deformable Objects and Nonprehensile Manipulation Control' href=https://scholar.google.com/scholar_lookup?title=Robot+Dynamic+Manipulation.+Perception+of+Deformable+Objects+and+Nonprehensile+Manipulation+Control&author=Kim+J.-T.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&author=Siciliano+B.&author=Ruggiero+F.&publication+year=2022&pages=229-253>Google Scholar</a></div></div></div><div id="ref30" aria-flowto="reference-30-content reference-30-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 30 in the content" id="reference-30-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [30] </div> <div class="circle-list__item__grouped"><div id="reference-30-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Kim</span>, <span class="given-names">J.-T.</span></span>, <span class="string-name"><span class="surname">Gutiérrez-Giles</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Satici</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Donaire</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Buonocore</span>, <span class="given-names">L. R.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. 
A.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="chapter-title">Nonprehensile Manipulation Control and Task Planning for Deformable Object Manipulation: Results From the RoDyMan Project</span>,” In: <span class="source">Informatics in Control, Automation and Robotics, Lecture Notes in Electrical Engineering</span>, <span class="string-name"><span class="surname">Gusikhin</span>, <span class="given-names">O.</span></span> and <span class="string-name"><span class="surname">Madani</span>, <span class="given-names">K.</span></span> (<span class="publisher-name">Springer</span>, <span class="publisher-loc">Cham</span>, <span class="year">2020</span>) pp. <span class="fpage">76</span>–<span class="lpage">100</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Informatics in Control, Automation and Robotics, Lecture Notes in Electrical Engineering' href=https://scholar.google.com/scholar_lookup?title=Informatics+in+Control%2C+Automation+and+Robotics%2C+Lecture+Notes+in+Electrical+Engineering&author=Ruggiero+F.&author=Kim+J.-T.&author=Guti%C3%A9rrez-Giles+A.&author=Satici+A.&author=Donaire+A.&author=Cacace+J.&author=Buonocore+L.+R.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Siciliano+B.&author=Gusikhin+O.&author=Madani+K.&publication+year=2020&pages=76-100>Google Scholar</a></div></div></div><div id="ref31" aria-flowto="reference-31-content reference-31-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 31 in the content" id="reference-31-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [31] </div> <div 
class="circle-list__item__grouped"><div id="reference-31-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Petit</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Serra</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Satici</span>, <span class="given-names">A. C.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Donaire</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Buonocore</span>, <span class="given-names">L. R.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. 
A.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project</span>,” <span class="source">IEEE Robot Autom Mag</span> <span class="volume">25</span>(<span class="issue">3</span>), <span class="fpage">83</span>–<span class="lpage">92</span> (<span class="year">2018</span>c).<a class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project' href=https://dx.doi.org/10.1109/MRA.2017.2781306>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project' href=https://scholar.google.com/scholar_lookup?title=Nonprehensile+manipulation+of+deformable+objects%3A+Achievements+and+perspectives+from+the+roDyMan+project&author=Ruggiero+F.&author=Petit+A.&author=Serra+D.&author=Satici+A.+C.&author=Cacace+J.&author=Donaire+A.&author=Ficuciello+F.&author=Buonocore+L.+R.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Mag&volume=25&doi=10.1109%2FMRA.2017.2781306&pages=83-92>Google Scholar</a></div></div></div><div id="ref32" aria-flowto="reference-32-content reference-32-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 32 in the content" id="reference-32-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" 
data-test-hidden="false" class="circle-list__item__number"> [32] </div> <div class="circle-list__item__grouped"><div id="reference-32-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Donaire</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Energy Pumping-and-Damping for Gait Robustification of Underactuated Planar Biped Robots Within the Hybrid Zero Dynamics Framework</span>,” In: <em class="italic">IEEE-RAS 20th International Conference on Humanoid Robots (Humanoids)</em>, (<span class="year">2020</span>) pp. <span class="fpage">415</span>–<span class="lpage">421</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Energy Pumping-and-Damping for Gait Robustification of Underactuated Planar Biped Robots Within the Hybrid Zero Dynamics Framework' href=https://scholar.google.com/scholar_lookup?title=Energy+Pumping-and-Damping+for+Gait+Robustification+of+Underactuated+Planar+Biped+Robots+Within+the+Hybrid+Zero+Dynamics+Framework&author=Arpenti+P.&author=Donaire+A.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020>Google Scholar</a></div></div></div><div id="ref33" aria-flowto="reference-33-content reference-33-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 33 in the content" id="reference-33-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [33] </div> <div class="circle-list__item__grouped"><div id="reference-33-content" 
class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Donaire</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2022</span>a) pp. <span class="fpage">6739</span>–<span class="lpage">6745</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots' href=https://dx.doi.org/10.1109/IROS47612.2022.9981206>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots' href=https://scholar.google.com/scholar_lookup?title=Uniform+global+exponential+stabilizing+passivity-based+tracking+controller+applied+to+planar+biped+robots&author=Arpenti+P.&author=Donaire+A.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022>Google Scholar</a></div></div></div><div id="ref34" aria-flowto="reference-34-content reference-34-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 34 in the content" id="reference-34-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [34] </div> <div class="circle-list__item__grouped"><div 
id="reference-34-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation (ICRA)</em>, (<span class="year">2020</span>b) pp. <span class="fpage">9802</span>–<span class="lpage">9808</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots' href=https://dx.doi.org/10.1109/ICRA40945.2020.9196598>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots' href=https://scholar.google.com/scholar_lookup?title=Interconnection+and+Damping+Assignment+Passivity-Based+Control+for+Gait+Generation+in+Underactuated+Compass-like+Robots&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020>Google Scholar</a></div></div></div><div id="ref35" aria-flowto="reference-35-content reference-35-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 35 in the content" id="reference-35-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [35] </div> <div class="circle-list__item__grouped"><div id="reference-35-content" 
class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs</span>,” <span class="source">Int J Control, Auto Syst</span> <span class="volume">20</span>(<span class="issue">1</span>), <span class="fpage">283</span>–<span class="lpage">297</span> (<span class="year">2022</span>b).<a class='ref-link' target='_blank' aria-label='CrossRef link for A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs' href=https://dx.doi.org/10.1007/s12555-020-0839-1>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs' href=https://scholar.google.com/scholar_lookup?title=A+constructive+methodology+for+the+IDA-PBC+of+underactuated+2-doF+mechanical+systems+with+explicit+solution+of+PDEs&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Int+J+Control%2C+Auto+Syst&volume=20&doi=10.1007%2Fs12555-020-0839-1&pages=283-297>Google Scholar</a></div></div></div><div id="ref36" aria-flowto="reference-36-content reference-36-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 36 in the content" id="reference-36-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [36] </div> <div 
class="circle-list__item__grouped"><div id="reference-36-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Nacusse</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="chapter-title">Gait Generation for Underactuated Compass-Like Robots Using Dissipative Forces in the Controller</span>,” In: <span class="source">IFAC-PapersOnLine</span>. vol. <span class="volume">53</span> (<span class="publisher-name">IFAC World Congress</span>, <span class="year">2020</span>) pp. <span class="fpage">9023</span>–<span class="lpage">9030</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for IFAC-PapersOnLine' href=https://scholar.google.com/scholar_lookup?title=IFAC-PapersOnLine&author=Nacusse+M.&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020&pages=9023-9030>Google Scholar</a></div></div></div><div id="ref37" aria-flowto="reference-37-content reference-37-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 37 in the content" id="reference-37-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [37] </div> <div class="circle-list__item__grouped"><div id="reference-37-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Morlando</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span 
class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Tethering a Human with a Quadruped Robot: A Guide Dog to Help Visually Impaired People</span>,” In: <em class="italic">31st Mediterranean Conference on Control and Automation (MED) 2023</em>, (<span class="year">2023</span>) pp. <span class="fpage">547</span>–<span class="lpage">553</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Tethering a Human with a Quadruped Robot: A Guide Dog to Help Visually Impaired People' href=https://scholar.google.com/scholar_lookup?title=Tethering+a+Human+with+a+Quadruped+Robot%3A+A+Guide+Dog+to+Help+Visually+Impaired+People&author=Morlando+V.&author=Lippiello+V.&author=Ruggiero+F.&publication+year=2023>Google Scholar</a></div></div></div><div id="ref38" aria-flowto="reference-38-content reference-38-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 38 in the content" id="reference-38-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [38] </div> <div class="circle-list__item__grouped"><div id="reference-38-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Morlando</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Disturbance Rejection for Legged Robots Through a Hybrid Observer</span>,” In: <em class="italic">30th Mediterranean Conference on Control and Automation (MED)</em>, (<span class="year">2022</span>) pp. 
<span class="fpage">743</span>–<span class="lpage">748</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Disturbance Rejection for Legged Robots Through a Hybrid Observer' href=https://dx.doi.org/10.1109/MED54222.2022.9837169>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Disturbance Rejection for Legged Robots Through a Hybrid Observer' href=https://scholar.google.com/scholar_lookup?title=Disturbance+Rejection+for+Legged+Robots+Through+a+Hybrid+Observer&author=Morlando+V.&author=Ruggiero+F.&publication+year=2022>Google Scholar</a></div></div></div><div id="ref39" aria-flowto="reference-39-content reference-39-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 39 in the content" id="reference-39-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [39] </div> <div class="circle-list__item__grouped"><div id="reference-39-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Morlando</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Teimoorzadeh</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots</span>,” <span class="source">Mech Mach Theory</span> <span class="volume">164</span>, <span class="fpage">104412</span> (<span class="year">2021</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots' 
href=https://dx.doi.org/10.1016/j.mechmachtheory.2021.104412>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots' href=https://scholar.google.com/scholar_lookup?title=Whole-body+control+with+disturbance+rejection+through+a+momentum-based+observer+for+quadruped+robots&author=Morlando+V.&author=Teimoorzadeh+A.&author=Ruggiero+F.&publication+year=2021&journal=Mech+Mach+Theory&volume=164&doi=10.1016%2Fj.mechmachtheory.2021.104412>Google Scholar</a></div></div></div><div id="ref40" aria-flowto="reference-40-content reference-40-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 40 in the content" id="reference-40-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [40] </div> <div class="circle-list__item__grouped"><div id="reference-40-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation</span>,” <span class="source">IEEE Robot Autom Lett</span> <span class="volume">1</span>(<span class="issue">1</span>), <span class="fpage">492</span>–<span class="lpage">499</span> (<span class="year">2016</span>b).<a class='ref-link' target='_blank' aria-label='CrossRef link for The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic 
manipulation' href=https://dx.doi.org/10.1109/LRA.2016.2519147>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation' href=https://scholar.google.com/scholar_lookup?title=The+effect+of+shapes+in+input-state+linearization+for+stabilization+of+nonprehensile+planar+rolling+dynamic+manipulation&author=Lippiello+V.&author=Ruggiero+F.&author=Siciliano+B.&publication+year=2016&journal=IEEE+Robot+Autom+Lett&volume=1&doi=10.1109%2FLRA.2016.2519147&pages=492-499>Google Scholar</a></div></div></div><div id="ref41" aria-flowto="reference-41-content reference-41-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 41 in the content" id="reference-41-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [41] </div> <div class="circle-list__item__grouped"><div id="reference-41-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Heins</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Schoellig</span>, <span class="given-names">A. 
P.</span></span>, “<span class="article-title">Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">8</span>(<span class="issue">12</span>), <span class="fpage">7986</span>–<span class="lpage">7993</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator' href=https://dx.doi.org/10.1109/LRA.2023.3324520>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator' href=https://scholar.google.com/scholar_lookup?title=Keep+it+upright%3A+Model+predictive+control+for+nonprehensile+object+transportation+with+obstacle+avoidance+on+a+mobile+manipulator&author=Heins+A.&author=Schoellig+A.+P.&publication+year=2023&journal=IEEE+Robot+Auto+Lett&volume=8&doi=10.1109%2FLRA.2023.3324520&pages=7986-7993>Google Scholar</a></div></div></div><div id="ref42" aria-flowto="reference-42-content reference-42-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 42 in the content" id="reference-42-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [42] </div> <div class="circle-list__item__grouped"><div id="reference-42-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Muchacho</span>, <span class="given-names">R. I. 
C.</span></span>, <span class="string-name"><span class="surname">Laha</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Figueredo</span>, <span class="given-names">L. F.</span></span> and <span class="string-name"><span class="surname">Haddadin</span>, <span class="given-names">S.</span></span>, “<span class="article-title">A Solution to Slosh-Free Robot Trajectory Optimization</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2022</span>) pp. <span class="fpage">223</span>–<span class="lpage">230</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for A Solution to Slosh-Free Robot Trajectory Optimization' href=https://scholar.google.com/scholar_lookup?title=A+Solution+to+Slosh-Free+Robot+Trajectory+Optimization&author=Muchacho+R.+I.+C.&author=Laha+R.&author=Figueredo+L.+F.&author=Haddadin+S.&publication+year=2022>Google Scholar</a></div></div></div><div id="ref43" aria-flowto="reference-43-content reference-43-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 43 in the content" id="reference-43-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [43] </div> <div class="circle-list__item__grouped"><div id="reference-43-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Muchacho</span>, <span class="given-names">R. I. 
C.</span></span>, <span class="string-name"><span class="surname">Bien</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Laha</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Naceri</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Figueredo</span>, <span class="given-names">L. F.</span></span> and <span class="string-name"><span class="surname">Haddadin</span>, <span class="given-names">S.</span></span>, “<span class="article-title">Shared Autonomy Control for Slosh-Free Teleoperation</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2023</span>) pp. <span class="fpage">10676</span>–<span class="lpage">10683</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Shared Autonomy Control for Slosh-Free Teleoperation' href=https://scholar.google.com/scholar_lookup?title=Shared+Autonomy+Control+for+Slosh-Free+Teleoperation&author=Muchacho+R.+I.+C.&author=Bien+S.&author=Laha+R.&author=Naceri+A.&author=Figueredo+L.+F.&author=Haddadin+S.&publication+year=2023>Google Scholar</a></div></div></div><div id="ref44" aria-flowto="reference-44-content reference-44-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 44 in the content" id="reference-44-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [44] </div> <div class="circle-list__item__grouped"><div id="reference-44-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Meriçli</span>, <span class="given-names">T.</span></span>, <span class="string-name"><span class="surname">Veloso</span>, <span class="given-names">M.</span></span> 
and <span class="string-name"><span class="surname">Akın</span>, <span class="given-names">H. L.</span></span>, “<span class="article-title">Push-manipulation of complex passive mobile objects using experimentally acquired motion models</span>,” <span class="source">Auton Robot</span> <span class="volume">38</span>(<span class="issue">3</span>), <span class="fpage">317</span>–<span class="lpage">329</span> (<span class="year">2015</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Push-manipulation of complex passive mobile objects using experimentally acquired motion models' href=https://dx.doi.org/10.1007/s10514-014-9414-z>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Push-manipulation of complex passive mobile objects using experimentally acquired motion models' href=https://scholar.google.com/scholar_lookup?title=Push-manipulation+of+complex+passive+mobile+objects+using+experimentally+acquired+motion+models&author=Meri%C3%A7li+T.&author=Veloso+M.&author=Ak%C4%B1n+H.+L.&publication+year=2015&journal=Auton+Robot&volume=38&doi=10.1007%2Fs10514-014-9414-z&pages=317-329>Google Scholar</a></div></div></div><div id="ref45" aria-flowto="reference-45-content reference-45-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 45 in the content" id="reference-45-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [45] </div> <div class="circle-list__item__grouped"><div id="reference-45-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Novin</span>, <span class="given-names">R. 
S.</span></span>, <span class="string-name"><span class="surname">Yazdani</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Merryweather</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Hermans</span>, <span class="given-names">T.</span></span>, “<span class="article-title">A model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics</span>,” <span class="source">Int J Robot Res</span> <span class="volume">40</span>(<span class="issue">4-5</span>), <span class="fpage">815</span>–<span class="lpage">831</span> (<span class="year">2021</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics' href=https://dx.doi.org/10.1177/0278364921992793>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics' href=https://scholar.google.com/scholar_lookup?title=A+model+predictive+approach+for+online+mobile+manipulation+of+non-holonomic+objects+using+learned+dynamics&author=Novin+R.+S.&author=Yazdani+A.&author=Merryweather+A.&author=Hermans+T.&publication+year=2021&journal=Int+J+Robot+Res&volume=40&doi=10.1177%2F0278364921992793&pages=815-831>Google Scholar</a></div></div></div><div id="ref46" aria-flowto="reference-46-content reference-46-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 46 in the content" id="reference-46-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [46] </div> <div class="circle-list__item__grouped"><div id="reference-46-content" 
class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Mahony</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Hamel</span>, <span class="given-names">T.</span></span>, “<span class="article-title">Robust trajectory tracking for a scale model autonomous helicopter</span>,” <span class="source">Int J Robust Nonlin</span> <span class="volume">14</span>(<span class="issue">12</span>), <span class="fpage">1035</span>–<span class="lpage">1059</span> (<span class="year">2004</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Robust trajectory tracking for a scale model autonomous helicopter' href=https://dx.doi.org/10.1002/rnc.931>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Robust trajectory tracking for a scale model autonomous helicopter' href=https://scholar.google.com/scholar_lookup?title=Robust+trajectory+tracking+for+a+scale+model+autonomous+helicopter&author=Mahony+R.&author=Hamel+T.&publication+year=2004&journal=Int+J+Robust+Nonlin&volume=14&doi=10.1002%2Frnc.931&pages=1035-1059>Google Scholar</a></div></div></div><div id="ref47" aria-flowto="reference-47-content reference-47-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 47 in the content" id="reference-47-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [47] </div> <div class="circle-list__item__grouped"><div id="reference-47-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Nonami</span>, <span class="given-names">K.</span></span>, <span class="string-name"><span class="surname">Kendoul</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Suzuki</span>, <span 
class="given-names">S.</span></span> and <span class="string-name"><span class="surname">Wang</span>, <span class="given-names">W.</span></span>, “<span class="chapter-title">Autonomous Flying Robots</span>,” In: <span class="source">Unmanned Aerial Vehicles and Micro Aerial Vehicles</span>, (<span class="publisher-name">Springer-Verlag</span>, <span class="publisher-loc">Berlin Heidelberg, D</span>, <span class="year">2010</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Unmanned Aerial Vehicles and Micro Aerial Vehicles' href=https://scholar.google.com/scholar_lookup?title=Unmanned+Aerial+Vehicles+and+Micro+Aerial+Vehicles&author=Nonami+K.&author=Kendoul+F.&author=Suzuki+S.&author=Wang+W.&publication+year=2010>Google Scholar</a></div></div></div><div id="ref48" aria-flowto="reference-48-content reference-48-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 48 in the content" id="reference-48-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [48] </div> <div class="circle-list__item__grouped"><div id="reference-48-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Spica</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Franchi</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Oriolo</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Bülthoff</span>, <span class="given-names">H.</span></span> and <span class="string-name"><span class="surname">Giordano</span>, <span class="given-names">P. 
R.</span></span>, “<span class="article-title">Aerial Grasping of a Moving Target with a Quadrotor UAV</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems</em>, (<span class="year">2012</span>) pp. <span class="fpage">4985</span>–<span class="lpage">4992</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Aerial Grasping of a Moving Target with a Quadrotor UAV' href=https://scholar.google.com/scholar_lookup?title=Aerial+Grasping+of+a+Moving+Target+with+a+Quadrotor+UAV&author=Spica+R.&author=Franchi+A.&author=Oriolo+G.&author=B%C3%BClthoff+H.&author=Giordano+P.+R.&publication+year=2012>Google Scholar</a></div></div></div><div id="ref49" aria-flowto="reference-49-content reference-49-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 49 in the content" id="reference-49-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [49] </div> <div class="circle-list__item__grouped"><div id="reference-49-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Antonelli</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Cataldi</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Giordano</span>, <span class="given-names">P. 
R.</span></span>, <span class="string-name"><span class="surname">Chiaverini</span>, <span class="given-names">S.</span></span> and <span class="string-name"><span class="surname">Franchi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Experimental Validation of a New Adaptive Control Scheme for Quadrotors MAVs</span>,” In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems 2013</em>, <span class="publisher-loc">Tokyo, J</span> (<span class="year">2013</span>) pp. <span class="fpage">3496</span>–<span class="lpage">3501</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Experimental Validation of a New Adaptive Control Scheme for Quadrotors MAVs' href=https://scholar.google.com/scholar_lookup?title=Experimental+Validation+of+a+New+Adaptive+Control+Scheme+for+Quadrotors+MAVs&author=Antonelli+G.&author=Cataldi+E.&author=Giordano+P.+R.&author=Chiaverini+S.&author=Franchi+A.&publication+year=2013>Google Scholar</a></div></div></div><div id="ref50" aria-flowto="reference-50-content reference-50-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><!----></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [50] </div> <div class="circle-list__item__grouped"><div id="reference-50-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Dydek</span>, <span class="given-names">Z.</span></span>, <span class="string-name"><span class="surname">Annaswamy</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Lavretsky</span>, <span class="given-names">E.</span></span>, “<span class="article-title">Adaptive control of quadrotor UAVs: A design trade study with flight evaluations</span>,” <span class="source">IEEE Trans Contr Syst Tech</span> <span class="volume">21</span>(<span class="issue">4</span>), <span 
class="fpage">1400</span>–<span class="lpage">1406</span> (<span class="year">2013</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Adaptive control of quadrotor UAVs: A design trade study with flight evaluations' href=https://dx.doi.org/10.1109/TCST.2012.2200104>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive control of quadrotor UAVs: A design trade study with flight evaluations' href=https://scholar.google.com/scholar_lookup?title=Adaptive+control+of+quadrotor+UAVs%3A+A+design+trade+study+with+flight+evaluations&author=Dydek+Z.&author=Annaswamy+A.&author=Lavretsky+E.&publication+year=2013&journal=IEEE+Trans+Contr+Syst+Tech&volume=21&doi=10.1109%2FTCST.2012.2200104&pages=1400-1406>Google Scholar</a></div></div></div><div id="ref51" aria-flowto="reference-51-content reference-51-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 51 in the content" id="reference-51-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [51] </div> <div class="circle-list__item__grouped"><div id="reference-51-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Roberts</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Tayebi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Adaptive position tracking of VTOL UAVs</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">27</span>(<span class="issue">1</span>), <span class="fpage">129</span>–<span class="lpage">142</span> (<span class="year">2011</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Adaptive position tracking of VTOL UAVs' href=https://dx.doi.org/10.1109/TRO.2010.2092870>CrossRef</a><a 
class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive position tracking of VTOL UAVs' href=https://scholar.google.com/scholar_lookup?title=Adaptive+position+tracking+of+VTOL+UAVs&author=Roberts+A.&author=Tayebi+A.&publication+year=2011&journal=IEEE+Trans+Robot&volume=27&doi=10.1109%2FTRO.2010.2092870&pages=129-142>Google Scholar</a></div></div></div><div id="ref52" aria-flowto="reference-52-content reference-52-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 52 in the content" id="reference-52-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [52] </div> <div class="circle-list__item__grouped"><div id="reference-52-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Yüksel</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Secchi</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Bülthoff</span>, <span class="given-names">H.</span></span> and <span class="string-name"><span class="surname">Franchi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">A Nonlinear Force Observer for Quadrotors and Application to Physical Interactive Tasks</span>,” In: <em class="italic">2014 IEEE/ASME International Conference on Advanced Intelligent Mechatronics</em>, <span class="publisher-loc">Besançon, France</span> (<span class="year">2014</span>) pp. 
<span class="fpage">433</span>–<span class="lpage">440</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for A Nonlinear Force Observer for Quadrotors and Application to Physical Interactive Tasks' href=https://scholar.google.com/scholar_lookup?title=A+Nonlinear+Force+Observer+for+Quadrotors+and+Application+to+Physical+Interactive+Tasks&author=Y%C3%BCksel+B.&author=Secchi+C.&author=B%C3%BClthoff+H.&author=Franchi+A.&publication+year=2014>Google Scholar</a></div></div></div><div id="ref53" aria-flowto="reference-53-content reference-53-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 53 in the content" id="reference-53-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [53] </div> <div class="circle-list__item__grouped"><div id="reference-53-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Egeland</span>, <span class="given-names">O.</span></span> and <span class="string-name"><span class="surname">Godhavn</span>, <span class="given-names">J.-M.</span></span>, “<span class="article-title">Passivity-based adaptive attitude control of a rigid spacecraft</span>,” <span class="source">IEEE Trans Automat Contr</span> <span class="volume">39</span>(<span class="issue">4</span>), <span class="fpage">842</span>–<span class="lpage">846</span> (<span class="year">1994</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based adaptive attitude control of a rigid spacecraft' href=https://dx.doi.org/10.1109/9.286266>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based adaptive attitude control of a rigid spacecraft' 
href=https://scholar.google.com/scholar_lookup?title=Passivity-based+adaptive+attitude+control+of+a+rigid+spacecraft&author=Egeland+O.&author=Godhavn+J.-M.&publication+year=1994&journal=IEEE+Trans+Automat+Contr&volume=39&doi=10.1109%2F9.286266&pages=842-846>Google Scholar</a></div></div></div><div id="ref54" aria-flowto="reference-54-content reference-54-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 54 in the content" id="reference-54-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [54] </div> <div class="circle-list__item__grouped"><div id="reference-54-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ha</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Zuo</span>, <span class="given-names">Z.</span></span>, <span class="string-name"><span class="surname">Choi</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lee</span>, <span class="given-names">D.</span></span>, “<span class="article-title">Passivity-based adaptive backstepping control of quadrotor-type UAVs</span>,” <span class="source">Robot Auton Syst</span> <span class="volume">62</span>(<span class="issue">9</span>), <span class="fpage">1305</span>–<span class="lpage">1315</span> (<span class="year">2014</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based adaptive backstepping control of quadrotor-type UAVs' href=https://dx.doi.org/10.1016/j.robot.2014.03.019>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based adaptive backstepping control of quadrotor-type UAVs' 
href="https://scholar.google.com/scholar_lookup?title=Passivity-based+adaptive+backstepping+control+of+quadrotor-type+UAVs&author=Ha+C.&author=Zuo+Z.&author=Choi+F.&author=Lee+D.&publication+year=2014&journal=Robot+Auton+Syst&volume=62&doi=10.1016%2Fj.robot.2014.03.019&pages=1305-1315">Google Scholar</a></div></div></div><div id="ref55" aria-flowto="reference-55-content reference-55-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 55 in the content" id="reference-55-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [55] </div> <div class="circle-list__item__grouped"><div id="reference-55-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Valavanis</span>, <span class="given-names">K.</span></span>. <span class="source">Advances in Unmanned Aerial Vehicles: State of the Art and the Road to Autonomy volume 33 of Intelligent Systems, Control and Automation: Science and Engineering</span> (<span class="publisher-name">Springer</span>, <span class="publisher-loc">Netherlands</span>, <span class="year">2007</span>).<a class="ref-link" target="_blank" rel="noopener noreferrer" aria-label="Google Scholar link for Advances in Unmanned Aerial Vehicles: State of the Art and the Road to Autonomy volume 33 of Intelligent Systems, Control and Automation: Science and Engineering" href="https://scholar.google.com/scholar_lookup?title=Advances+in+Unmanned+Aerial+Vehicles%3A+State+of+the+Art+and+the+Road+to+Autonomy+volume+33+of+Intelligent+Systems%2C+Control+and+Automation%3A+Science+and+Engineering&author=Valavanis+K.&publication+year=2007">Google Scholar</a></div></div></div><div id="ref56" aria-flowto="reference-56-content reference-56-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return 
to the reference 56 in the content" id="reference-56-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [56] </div> <div class="circle-list__item__grouped"><div id="reference-56-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Valavanis</span>, <span class="given-names">K.</span></span> and <span class="string-name"><span class="surname">Vachtsevanos</span>, <span class="given-names">G.</span></span>. <span class="source">Handbook of Unmanned Aerial Vehicles</span> (<span class="publisher-name">Springer</span>, <span class="publisher-loc">Netherlands</span>, <span class="year">2015</span>).<a class="ref-link" target="_blank" rel="noopener noreferrer" aria-label="CrossRef link for Handbook of Unmanned Aerial Vehicles" href="https://dx.doi.org/10.1007/978-90-481-9707-1">CrossRef</a><a class="ref-link" target="_blank" rel="noopener noreferrer" aria-label="Google Scholar link for Handbook of Unmanned Aerial Vehicles" href="https://scholar.google.com/scholar_lookup?title=Handbook+of+Unmanned+Aerial+Vehicles&author=Valavanis+K.&author=Vachtsevanos+G.&publication+year=2015">Google Scholar</a></div></div></div><div id="ref57" aria-flowto="reference-57-content reference-57-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 57 in the content" id="reference-57-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [57] </div> <div class="circle-list__item__grouped"><div id="reference-57-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ollero</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Tognon</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span 
class="surname">Suarez</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Lee</span>, <span class="given-names">D.</span></span> and <span class="string-name"><span class="surname">Franchi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Past, present, and future of aerial robotic manipulators</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">38</span>(<span class="issue">1</span>), <span class="fpage">626</span>–<span class="lpage">645</span> (<span class="year">2022</span>).<a class="ref-link" target="_blank" rel="noopener noreferrer" aria-label="CrossRef link for Past, present, and future of aerial robotic manipulators" href="https://dx.doi.org/10.1109/TRO.2021.3084395">CrossRef</a><a class="ref-link" target="_blank" rel="noopener noreferrer" aria-label="Google Scholar link for Past, present, and future of aerial robotic manipulators" href="https://scholar.google.com/scholar_lookup?title=Past%2C+present%2C+and+future+of+aerial+robotic+manipulators&author=Ollero+A.&author=Tognon+M.&author=Suarez+A.&author=Lee+D.&author=Franchi+A.&publication+year=2022&journal=IEEE+Trans+Robot&volume=38&doi=10.1109%2FTRO.2021.3084395&pages=626-645">Google Scholar</a></div></div></div><div id="ref58" aria-flowto="reference-58-content reference-58-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 58 in the content" id="reference-58-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [58] </div> <div class="circle-list__item__grouped"><div id="reference-58-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span 
class="string-name"><span class="surname">Ollero</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Aerial manipulation: A literature review</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">3</span>(<span class="issue">3</span>), <span class="fpage">1957</span>–<span class="lpage">1964</span> (<span class="year">2018</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for Aerial manipulation: A literature review' href=https://dx.doi.org/10.1109/LRA.2018.2808541>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Aerial manipulation: A literature review' href=https://scholar.google.com/scholar_lookup?title=Aerial+manipulation%3A+A+literature+review&author=Ruggiero+F.&author=Lippiello+V.&author=Ollero+A.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2808541&pages=1957-1964>Google Scholar</a></div></div></div><div id="ref59" aria-flowto="reference-59-content reference-59-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 59 in the content" id="reference-59-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [59] </div> <div class="circle-list__item__grouped"><div id="reference-59-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Sadeghian</span>, <span class="given-names">H.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Passivity-based control of vtol uavs 
with a momentum-based estimator of external wrench and unmodeled dynamics</span>,” <span class="source">Robot Auton Syst</span> <span class="volume">72</span>, <span class="fpage">139</span>–<span class="lpage">151</span> (<span class="year">2015</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics' href=https://dx.doi.org/10.1016/j.robot.2015.05.006>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics' href=https://scholar.google.com/scholar_lookup?title=Passivity-based+control+of+vtol+uavs+with+a+momentum-based+estimator+of+external+wrench+and+unmodeled+dynamics&author=Ruggiero+F.&author=Cacace+J.&author=Sadeghian+H.&author=Lippiello+V.&publication+year=2015&journal=Robot+Auton+Syst&volume=72&doi=10.1016%2Fj.robot.2015.05.006&pages=139-151>Google Scholar</a></div></div></div><div id="ref60" aria-flowto="reference-60-content reference-60-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 60 in the content" id="reference-60-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [60] </div> <div class="circle-list__item__grouped"><div id="reference-60-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Sotos</span>, <span class="given-names">S. M. 
O.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Active disturbance rejection control for the robust flight of a passively tilted hexarotor</span>,” <span class="source">Drones</span> <span class="volume">6</span>(<span class="issue">9</span>), <span class="fpage">250</span> (<span class="year">2022</span>a).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Active disturbance rejection control for the robust flight of a passively tilted hexarotor' href=https://scholar.google.com/scholar_lookup?title=Active+disturbance+rejection+control+for+the+robust+flight+of+a+passively+tilted+hexarotor&author=Sotos+S.+M.+O.&author=Cacace+J.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Drones&volume=6>Google Scholar</a></div></div></div><div id="ref61" aria-flowto="reference-61-content reference-61-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 61 in the content" id="reference-61-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [61] </div> <div class="circle-list__item__grouped"><div id="reference-61-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Sotos</span>, <span class="given-names">S. M. 
O.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Globally attractive hyperbolic control for the robust flight of an actively tilting quadrotor</span>,” <span class="source">Drones</span> <span class="volume">6</span>(<span class="issue">12</span>), <span class="fpage">373</span> (<span class="year">2022</span>b).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Globally attractive hyperbolic control for the robust flight of an actively tilting quadrotor' href=https://scholar.google.com/scholar_lookup?title=Globally+attractive+hyperbolic+control+for+the+robust+flight+of+an+actively+tilting+quadrotor&author=Sotos+S.+M.+O.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Drones&volume=6>Google Scholar</a></div></div></div><div id="ref62" aria-flowto="reference-62-content reference-62-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 62 in the content" id="reference-62-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [62] </div> <div class="circle-list__item__grouped"><div id="reference-62-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Serra</span>, <span class="given-names">D.</span></span>, “<span class="article-title">Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach</span>,” In: <em 
class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems</em>, (<span class="year">2014</span>a) pp. <span class="fpage">4782</span>–<span class="lpage">4788</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach' href=https://dx.doi.org/10.1109/IROS.2014.6943242>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach' href=https://scholar.google.com/scholar_lookup?title=Emergency+Landing+for+a+Quadrotor+in+Case+of+a+Propeller+Failure%3A+A+Backstepping+Approach&author=Lippiello+V.&author=Ruggiero+F.&author=Serra+D.&publication+year=2014>Google Scholar</a></div></div></div><div id="ref63" aria-flowto="reference-63-content reference-63-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 63 in the content" id="reference-63-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [63] </div> <div class="circle-list__item__grouped"><div id="reference-63-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Serra</span>, <span class="given-names">D.</span></span>. <span class="article-title">Emergency Landing for a Quadrotor in Case of a Propeller Failure: A PID Based Approach</span>. 
In: <em class="italic">12th IEEE International Symposium on Safety, Security, and Rescue Robotics</em>, <span class="year">2014</span>b.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Emergency Landing for a Quadrotor in Case of a Propeller Failure: A PID Based Approach' href=https://scholar.google.com/scholar_lookup?title=Emergency+Landing+for+a+Quadrotor+in+Case+of+a+Propeller+Failure%3A+A+PID+Based+Approach&author=Lippiello+V.&author=Ruggiero+F.&author=Serra+D.&publication+year=2014>Google Scholar</a></div></div></div><div id="ref64" aria-flowto="reference-64-content reference-64-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 64 in the content" id="reference-64-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [64] </div> <div class="circle-list__item__grouped"><div id="reference-64-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">D’Ago</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Suarez</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Gañán</span>, <span class="given-names">F. J.</span></span>, <span class="string-name"><span class="surname">Buonocore</span>, <span class="given-names">L. 
R.</span></span>, <span class="string-name"><span class="surname">Di Castro</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Ollero</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems</span>,” <span class="source">Robot Auton Syst</span> <span class="volume">175</span>, <span class="fpage">104643</span> (<span class="year">2024</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems' href=https://dx.doi.org/10.1016/j.robot.2024.104643>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems' href=https://scholar.google.com/scholar_lookup?title=Modelling+and+identification+methods+for+simulation+of+cable-suspended+dual-arm+robotic+systems&author=D%E2%80%99Ago+G.&author=Selvaggio+M.&author=Suarez+A.&author=Ga%C3%B1%C3%A1n+F.+J.&author=Buonocore+L.+R.&author=Di+Castro+M.&author=Lippiello+V.&author=Ollero+A.&author=Ruggiero+F.&publication+year=2024&journal=Robot+Auton+Syst&volume=175&doi=10.1016%2Fj.robot.2024.104643>Google Scholar</a></div></div></div><div id="ref65" aria-flowto="reference-65-content reference-65-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 65 in the content" id="reference-65-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> 
[65] </div> <div class="circle-list__item__grouped"><div id="reference-65-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Trujillo</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Cano</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Ascorbe</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Viguria</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Peréz</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Ollero</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation</em>, (<span class="year">2015</span>b) pp. 
<span class="fpage">4014</span>–<span class="lpage">4020</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm' href=https://dx.doi.org/10.1109/ICRA.2015.7139760>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm' href=https://scholar.google.com/scholar_lookup?title=A+Multilayer+Control+for+Multirotor+uavs+Equipped+with+a+Servo+Robot+Arm&author=Ruggiero+F.&author=Trujillo+M.&author=Cano+R.&author=Ascorbe+H.&author=Viguria+A.&author=Per%C3%A9z+C.&author=Lippiello+V.&author=Ollero+A.&author=Siciliano+B.&publication+year=2015>Google Scholar</a></div></div></div><div id="ref66" aria-flowto="reference-66-content reference-66-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 66 in the content" id="reference-66-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [66] </div> <div class="circle-list__item__grouped"><div id="reference-66-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="chapter-title">A Novel Hybrid Aerial-Ground Manipulator for Pipeline Inspection Tasks</span>,” In: <span class="source">Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO)</span>, Biograd na Moru, Croatia, (<span class="year">2021</span>) pp. 
<span class="fpage">1</span>–<span class="lpage">6</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO)' href=https://scholar.google.com/scholar_lookup?title=Aerial+Robotic+Systems+Physically+Interacting+with+the+Environment+(AIRPHARO)&author=Cacace+J.&author=Fontanelli+G.+A.&author=Lippiello+V.&publication+year=2021&pages=1-6>Google Scholar</a></div></div></div><div id="ref67" aria-flowto="reference-67-content reference-67-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 67 in the content" id="reference-67-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [67] </div> <div class="circle-list__item__grouped"><div id="reference-67-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Silva</span>, <span class="given-names">M. D.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">A novel articulated rover for industrial pipes inspection tasks</span>,” In: <em class="italic">IEEE/ASME International Conference on Advanced Intelligent Mechatronics (AIM)</em>, (<span class="year">2021</span>b) pp. 
<span class="fpage">1027</span>–<span class="lpage">1032</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for A novel articulated rover for industrial pipes inspection tasks' href=https://dx.doi.org/10.1109/AIM46487.2021.9517691>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A novel articulated rover for industrial pipes inspection tasks' href=https://scholar.google.com/scholar_lookup?title=A+novel+articulated+rover+for+industrial+pipes+inspection+tasks&author=Cacace+J.&author=Silva+M.+D.&author=Fontanelli+G.+A.&author=Lippiello+V.&publication+year=2021>Google Scholar</a></div></div></div><div id="ref68" aria-flowto="reference-68-content reference-68-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 68 in the content" id="reference-68-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [68] </div> <div class="circle-list__item__grouped"><div id="reference-68-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cuniato</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>. <span class="article-title">A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation</span>. In: <em class="italic">20th International Conference on Advanced Robotics</em>, ( <span class="year">2021</span>) pp. 
<span class="fpage">830</span>–<span class="lpage">835</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation' href=https://dx.doi.org/10.1109/ICAR53236.2021.9659398>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation' href=https://scholar.google.com/scholar_lookup?title=A+Hardware-in-the-+Loop+Simulator+for+Physical+Human-Aerial+Manipulator+Cooperation&author=Cuniato+E.&author=Cacace+J.&author=Selvaggio+M.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2021>Google Scholar</a></div></div></div><div id="ref69" aria-flowto="reference-69-content reference-69-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 69 in the content" id="reference-69-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [69] </div> <div class="circle-list__item__grouped"><div id="reference-69-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. 
A.</span></span> and <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Image-based visual-impedance control of a dual-arm aerial manipulator</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">3</span>(<span class="issue">3</span>), <span class="fpage">1856</span>–<span class="lpage">1863</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Image-based visual-impedance control of a dual-arm aerial manipulator' href=https://dx.doi.org/10.1109/LRA.2018.2806091>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Image-based visual-impedance control of a dual-arm aerial manipulator' href=https://scholar.google.com/scholar_lookup?title=Image-based+visual-impedance+control+of+a+dual-arm+aerial+manipulator&author=Lippiello+V.&author=Fontanelli+G.+A.&author=Ruggiero+F.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2806091&pages=1856-1863>Google Scholar</a></div></div></div><div id="ref70" aria-flowto="reference-70-content reference-70-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 70 in the content" id="reference-70-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [70] </div> <div class="circle-list__item__grouped"><div id="reference-70-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">D’Angelo</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Pagano</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Ruggiero</span>, <span class="given-names">F.</span></span> and <span 
class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines</span>,” In: <em class="italic">International Conference on Unmanned Aircraft System</em>, (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines' href=https://dx.doi.org/10.1109/ICUAS57906.2023.10156403>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines' href=https://scholar.google.com/scholar_lookup?title=Development+of+a+Control+Framework+to+Autonomously+Install+Clip+Bird+Diverters+on+High-Voltage+Lines&author=D%E2%80%99Angelo+S.&author=Pagano+F.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2023>Google Scholar</a></div></div></div><div id="ref71" aria-flowto="reference-71-content reference-71-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 71 in the content" id="reference-71-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [71] </div> <div class="circle-list__item__grouped"><div id="reference-71-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Cognetti</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Nikolaidis</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Ivaldi</span>, <span 
class="given-names">S.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Autonomy in physical human-robot interaction: A brief survey</span>,” <span class="source">IEEE Robot Autom Lett</span> <span class="volume">6</span>(<span class="issue">4</span>), <span class="fpage">7989</span>–<span class="lpage">7996</span> (<span class="year">2021</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Autonomy in physical human-robot interaction: A brief survey' href=https://dx.doi.org/10.1109/LRA.2021.3100603>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Autonomy in physical human-robot interaction: A brief survey' href=https://scholar.google.com/scholar_lookup?title=Autonomy+in+physical+human-robot+interaction%3A+A+brief+survey&author=Selvaggio+M.&author=Cognetti+M.&author=Nikolaidis+S.&author=Ivaldi+S.&author=Siciliano+B.&publication+year=2021&journal=IEEE+Robot+Autom+Lett&volume=6&doi=10.1109%2FLRA.2021.3100603&pages=7989-7996>Google Scholar</a></div></div></div><div id="ref72" aria-flowto="reference-72-content reference-72-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 72 in the content" id="reference-72-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [72] </div> <div class="circle-list__item__grouped"><div id="reference-72-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Johannsmeier</span>, <span class="given-names">L.</span></span> and <span class="string-name"><span class="surname">Haddadin</span>, <span class="given-names">S.</span></span>, “<span class="article-title">A hierarchical human-robot interaction-planning framework for task allocation in 
collaborative industrial assembly processes</span>,” <span class="source">IEEE Robot Autom Lett</span> <span class="volume">2</span>(<span class="issue">1</span>), <span class="fpage">41</span>–<span class="lpage">48</span> (<span class="year">2017</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes' href=https://dx.doi.org/10.1109/LRA.2016.2535907>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes' href=https://scholar.google.com/scholar_lookup?title=A+hierarchical+human-robot+interaction-planning+framework+for+task+allocation+in+collaborative+industrial+assembly+processes&author=Johannsmeier+L.&author=Haddadin+S.&publication+year=2017&journal=IEEE+Robot+Autom+Lett&volume=2&doi=10.1109%2FLRA.2016.2535907&pages=41-48>Google Scholar</a></div></div></div><div id="ref73" aria-flowto="reference-73-content reference-73-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 73 in the content" id="reference-73-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [73] </div> <div class="circle-list__item__grouped"><div id="reference-73-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Grieco</span>, <span 
class="given-names">R.</span></span>, “<span class="article-title">Combining human guidance and structured task execution during physical human–robot collaboration</span>,” <span class="source">J Intell Manuf</span> <span class="volume">34</span>(<span class="issue">7</span>), <span class="fpage">3053</span>–<span class="lpage">3067</span> (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Combining human guidance and structured task execution during physical human–robot collaboration' href=https://dx.doi.org/10.1007/s10845-022-01989-y>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Combining human guidance and structured task execution during physical human–robot collaboration' href=https://scholar.google.com/scholar_lookup?title=Combining+human+guidance+and+structured+task+execution+during+physical+human%E2%80%93robot+collaboration&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Grieco+R.&publication+year=2022&journal=J+Intell+Manuf&volume=34&doi=10.1007%2Fs10845-022-01989-y&pages=3053-3067>Google Scholar</a></div></div></div><div id="ref74" aria-flowto="reference-74-content reference-74-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 74 in the content" id="reference-74-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [74] </div> <div class="circle-list__item__grouped"><div id="reference-74-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span> and <span 
class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Interactive plan execution during human-robot cooperative manipulation</span>,” <span class="source">IFAC-PapersOnLine</span> <span class="volume">51</span>(<span class="issue">22</span>), <span class="fpage">500</span>–<span class="lpage">505</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Interactive plan execution during human-robot cooperative manipulation' href=https://dx.doi.org/10.1016/j.ifacol.2018.11.584>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Interactive plan execution during human-robot cooperative manipulation' href=https://scholar.google.com/scholar_lookup?title=Interactive+plan+execution+during+human-robot+cooperative+manipulation&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Lippiello+V.&publication+year=2018&journal=IFAC-PapersOnLine&volume=51&doi=10.1016%2Fj.ifacol.2018.11.584&pages=500-505>Google Scholar</a></div></div></div><div id="ref75" aria-flowto="reference-75-content reference-75-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 75 in the content" id="reference-75-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [75] </div> <div class="circle-list__item__grouped"><div id="reference-75-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Goodrich</span>, <span class="given-names">M. A.</span></span> and <span class="string-name"><span class="surname">Schultz</span>, <span class="given-names">A. 
C.</span></span>, “<span class="article-title">Human-robot interaction: A survey</span>,” <span class="source">Found Trends® Human-Comp Inter</span> <span class="volume">1</span>(<span class="issue">3</span>), <span class="fpage">203</span>–<span class="lpage">275</span> (<span class="year">2008</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Human-robot interaction: A survey' href=https://dx.doi.org/10.1561/1100000005>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Human-robot interaction: A survey' href=https://scholar.google.com/scholar_lookup?title=Human-robot+interaction%3A+A+survey&author=Goodrich+M.+A.&author=Schultz+A.+C.&publication+year=2008&journal=Found+Trends%C2%AE+Human-Comp+Inter&volume=1&doi=10.1561%2F1100000005&pages=203-275>Google Scholar</a></div></div></div><div id="ref76" aria-flowto="reference-76-content reference-76-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 76 in the content" id="reference-76-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [76] </div> <div class="circle-list__item__grouped"><div id="reference-76-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Yang</span>, <span class="given-names">G.-Z.</span></span>, <span class="string-name"><span class="surname">Cambias</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Cleary</span>, <span class="given-names">K.</span></span>, <span class="string-name"><span class="surname">Daimler</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Drake</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Dupont</span>, <span 
class="given-names">P. E.</span></span>, <span class="string-name"><span class="surname">Hata</span>, <span class="given-names">N.</span></span>, <span class="string-name"><span class="surname">Kazanzides</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Martel</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Patel</span>, <span class="given-names">R. V.</span></span>, <span class="string-name"><span class="surname">Santos</span>, <span class="given-names">V. J.</span></span> and <span class="string-name"><span class="surname">Taylor</span>, <span class="given-names">R. H.</span></span>, “<span class="article-title">Medical robotics—regulatory, ethical, and legal considerations for increasing levels of autonomy</span>,” <span class="source">Sci Robot</span> <span class="volume">2</span>(<span class="issue">4</span>), <span class="fpage">eaam8638</span> (<span class="year">2017</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Medical robotics—regulatory, ethical, and legal considerations for increasing levels of autonomy' href=https://dx.doi.org/10.1126/scirobotics.aam8638>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Medical robotics—regulatory, ethical, and legal considerations for increasing levels of autonomy' href=https://scholar.google.com/scholar_lookup?title=Medical+robotics+2014%3Bregulatory%2C+ethical%2C+and+legal+considerations+for+increasing+levels+of+autonomy&author=Yang+G.-Z.&author=Cambias+J.&author=Cleary+K.&author=Daimler+E.&author=Drake+J.&author=Dupont+P.+E.&author=Hata+N.&author=Kazanzides+P.&author=Martel+S.&author=Patel+R.+V.&author=Santos+V.+J.&author=Taylor+R.+H.&publication+year=2017&journal=Sci+Robot&volume=2&doi=10.1126%2Fscirobotics.aam8638>Google Scholar</a></div></div></div><div id="ref77" aria-flowto="reference-77-content reference-77-button"
class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 77 in the content" id="reference-77-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [77] </div> <div class="circle-list__item__grouped"><div id="reference-77-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Kanda</span>, <span class="given-names">T.</span></span> and <span class="string-name"><span class="surname">Ishiguro</span>, <span class="given-names">H.</span></span>. <span class="source">Human-Robot Interaction in Social Robotics</span> (<span class="publisher-name">CRC Press Boca Raton</span>, <span class="publisher-loc">Florida</span>, <span class="year">2017</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Human-Robot Interaction in Social Robotics' href=https://dx.doi.org/10.1201/b13004>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Human-Robot Interaction in Social Robotics' href=https://scholar.google.com/scholar_lookup?title=Human-Robot+Interaction+in+Social+Robotics&author=Kanda+T.&author=Ishiguro+H.&publication+year=2017>Google Scholar</a></div></div></div><div id="ref78" aria-flowto="reference-78-content reference-78-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 78 in the content" id="reference-78-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [78] </div> <div class="circle-list__item__grouped"><div id="reference-78-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Schilling</span>, <span class="given-names">M.</span></span>, <span
class="string-name"><span class="surname">Burgard</span>, <span class="given-names">W.</span></span>, <span class="string-name"><span class="surname">Muelling</span>, <span class="given-names">K.</span></span>, <span class="string-name"><span class="surname">Wrede</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ritter</span>, <span class="given-names">H.</span></span>, “<span class="article-title">Shared autonomy–learning of joint action and human-robot collaboration</span>,” <span class="source">Front Neurorobotics</span> <span class="volume">13</span>, <span class="fpage">16</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Shared autonomy–learning of joint action and human-robot collaboration' href=https://dx.doi.org/10.3389/fnbot.2019.00016>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Shared autonomy–learning of joint action and human-robot collaboration' href=https://scholar.google.com/scholar_lookup?title=Shared+autonomy%E2%80%93learning+of+joint+action+and+human-robot+collaboration&author=Schilling+M.&author=Burgard+W.&author=Muelling+K.&author=Wrede+B.&author=Ritter+H.&publication+year=2019&journal=Front+Neurorobotics&volume=13&doi=10.3389%2Ffnbot.2019.00016>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for Shared autonomy–learning of joint action and human-robot collaboration' href=https://www.ncbi.nlm.nih.gov/pubmed/31156417>PubMed</a></div></div></div><div id="ref79" aria-flowto="reference-79-content reference-79-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 79 in the content" id="reference-79-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [79] </div> <div 
class="circle-list__item__grouped"><div id="reference-79-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Bruemmer</span>, <span class="given-names">D. J.</span></span>, <span class="string-name"><span class="surname">Dudenhoeffer</span>, <span class="given-names">D. D.</span></span> and <span class="string-name"><span class="surname">Marble</span>, <span class="given-names">J. L.</span></span>, “<span class="chapter-title">Dynamic-Autonomy for Urban Search and Rescue</span>,” In: <span class="source">AAAI Mobile Robot Competition</span>, (<span class="year">2002</span>) pp. <span class="fpage">33</span>–<span class="lpage">37</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for AAAI Mobile Robot Competition' href=https://scholar.google.com/scholar_lookup?title=AAAI+Mobile+Robot+Competition&author=Bruemmer+D.+J.&author=Dudenhoeffer+D.+D.&author=Marble+J.+L.&publication+year=2002&pages=33-37>Google Scholar</a></div></div></div><div id="ref80" aria-flowto="reference-80-content reference-80-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 80 in the content" id="reference-80-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [80] </div> <div class="circle-list__item__grouped"><div id="reference-80-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Dias</span>, <span class="given-names">M. 
B.</span></span>, <span class="string-name"><span class="surname">Kannan</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Browning</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Jones</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Argall</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Dias</span>, <span class="given-names">M. F.</span></span>, <span class="string-name"><span class="surname">Zinck</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Veloso</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Stentz</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Sliding Autonomy for Peer-to-Peer Human-Robot Teams</span>,” In: <em class="italic">International Conference on Intelligent Autonomous Systems</em>, (<span class="year">2008</span>) pp. 
<span class="fpage">332</span>–<span class="lpage">341</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Sliding Autonomy for Peer-to-Peer Human-Robot Teams' href=https://scholar.google.com/scholar_lookup?title=Sliding+Autonomy+for+Peer-to-Peer+Human-Robot+Teams&author=Dias+M.+B.&author=Kannan+B.&author=Browning+B.&author=Jones+E.&author=Argall+B.&author=Dias+M.+F.&author=Zinck+M.&author=Veloso+M.&author=Stentz+A.&publication+year=2008>Google Scholar</a></div></div></div><div id="ref81" aria-flowto="reference-81-content reference-81-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 81 in the content" id="reference-81-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [81] </div> <div class="circle-list__item__grouped"><div id="reference-81-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Kortenkamp</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Keirn-Schreckenghost</span>, <span class="given-names">D.</span></span> and <span class="string-name"><span class="surname">Bonasso</span>, <span class="given-names">R. P.</span></span>, “<span class="article-title">Adjustable Control Autonomy for Manned Space Flight</span>,” In: <em class="italic">IEEE Aerospace Conference (AeroConf)</em>, <span class="volume">7</span>, (<span class="year">2000</span>) pp. 
<span class="fpage">629</span>–<span class="lpage">640</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Adjustable Control Autonomy for Manned Space Flight' href=https://scholar.google.com/scholar_lookup?title=Adjustable+Control+Autonomy+for+Manned+Space+Flight&author=Kortenkamp+D.&author=Keirn-Schreckenghost+D.&author=Bonasso+R.+P.&publication+year=2000>Google Scholar</a></div></div></div><div id="ref82" aria-flowto="reference-82-content reference-82-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 82 in the content" id="reference-82-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [82] </div> <div class="circle-list__item__grouped"><div id="reference-82-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Anderson</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Peters</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Iagnemma</span>, <span class="given-names">K.</span></span> and <span class="string-name"><span class="surname">Overholt</span>, <span class="given-names">J.</span></span>, “<span class="article-title">Semi-Autonomous Stability Control and Hazard Avoidance for Manned and Unmanned Ground Vehicles</span>,” In: <em class="italic">The 27th Army Science Conference</em>, (<span class="year">2010</span>) pp. 
<span class="fpage">1</span>–<span class="lpage">8</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Semi-Autonomous Stability Control and Hazard Avoidance for Manned and Unmanned Ground Vehicles' href=https://scholar.google.com/scholar_lookup?title=Semi-Autonomous+Stability+Control+and+Hazard+Avoidance+for+Manned+and+Unmanned+Ground+Vehicles&author=Anderson+S.&author=Peters+S.&author=Iagnemma+K.&author=Overholt+J.&publication+year=2010>Google Scholar</a></div></div></div><div id="ref83" aria-flowto="reference-83-content reference-83-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 83 in the content" id="reference-83-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [83] </div> <div class="circle-list__item__grouped"><div id="reference-83-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Desai</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Yanco</span>, <span class="given-names">H. A.</span></span>, “<span class="chapter-title">Blending Human and Robot Inputs for Sliding Scale Autonomy</span>,” In: <span class="source">IEEE International Workshop on Robot and Human Interactive Communication (ROMAN)</span>, (<span class="year">2005</span>) pp. 
<span class="fpage">537</span>–<span class="lpage">542</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for IEEE International Workshop on Robot and Human Interactive Communication (ROMAN)' href=https://scholar.google.com/scholar_lookup?title=IEEE+International+Workshop+on+Robot+and+Human+Interactive+Communication+(ROMAN)&author=Desai+M.&author=Yanco+H.+A.&publication+year=2005&pages=537-542>Google Scholar</a></div></div></div><div id="ref84" aria-flowto="reference-84-content reference-84-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 84 in the content" id="reference-84-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [84] </div> <div class="circle-list__item__grouped"><div id="reference-84-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Pitzer</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Styer</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Bersch</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">DuHadway</span>, <span class="given-names">C.</span></span> and <span class="string-name"><span class="surname">Becker</span>, <span class="given-names">J.</span></span>, “<span class="article-title">Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation</em>, ( <span class="year">2011</span>) pp. 
<span class="fpage">6245</span>–<span class="lpage">6251</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation' href=https://dx.doi.org/10.1109/ICRA.2011.5980259>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation' href=https://scholar.google.com/scholar_lookup?title=Towards+Perceptual+Shared+Autonomy+for+Robotic+Mobile+Manipulation&author=Pitzer+B.&author=Styer+M.&author=Bersch+C.&author=DuHadway+C.&author=Becker+J.&publication+year=2011>Google Scholar</a></div></div></div><div id="ref85" aria-flowto="reference-85-content reference-85-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 85 in the content" id="reference-85-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [85] </div> <div class="circle-list__item__grouped"><div id="reference-85-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Sellner</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Simmons</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Singh</span>, <span class="given-names">S.</span></span>, “<span class="chapter-title">User Modelling for Principled Sliding Autonomy in Human-Robot Teams</span>,” In: <span class="source">Multi-Robot Systems. From Swarms to Intelligent Automata</span>. vol. <span class="volume">3</span> (<span class="publisher-name">Springer</span>, <span class="year">2005</span>) pp. <span class="fpage">197</span>–<span class="lpage">208</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Multi-Robot Systems. 
From Swarms to Intelligent Automata' href=https://scholar.google.com/scholar_lookup?title=Multi-Robot+Systems.+From+Swarms+to+Intelligent+Automata&author=Sellner+B.&author=Simmons+R.&author=Singh+S.&publication+year=2005&pages=197-208>Google Scholar</a></div></div></div><div id="ref86" aria-flowto="reference-86-content reference-86-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 86 in the content" id="reference-86-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [86] </div> <div class="circle-list__item__grouped"><div id="reference-86-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Dragan</span>, <span class="given-names">A. D.</span></span> and <span class="string-name"><span class="surname">Srinivasa</span>, <span class="given-names">S. 
S.</span></span>, “<span class="article-title">A Policy-Blending Formalism for Shared Control</span>,” <span class="source">Int J Robot Res</span> <span class="volume">32</span>(<span class="issue">7</span>), <span class="fpage">790</span>–<span class="lpage">805</span> (<span class="year">2013</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A Policy-Blending Formalism for Shared Control' href=https://dx.doi.org/10.1177/0278364913490324>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A Policy-Blending Formalism for Shared Control' href=https://scholar.google.com/scholar_lookup?title=A+Policy-Blending+Formalism+for+Shared+Control&author=Dragan+A.+D.&author=Srinivasa+S.+S.&publication+year=2013&journal=Int+J+Robot+Res&volume=32&doi=10.1177%2F0278364913490324&pages=790-805>Google Scholar</a></div></div></div><div id="ref87" aria-flowto="reference-87-content reference-87-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 87 in the content" id="reference-87-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [87] </div> <div class="circle-list__item__grouped"><div id="reference-87-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Javdani</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Srinivasa</span>, <span class="given-names">S. S.</span></span> and <span class="string-name"><span class="surname">Bagnell</span>, <span class="given-names">J. 
A.</span></span>, “<span class="chapter-title">Shared Autonomy Via Hindsight Optimization</span>,” In: <span class="source">Robotics Science and Systems</span>, (<span class="publisher-name">NIH Public Access</span>, <span class="year">2015</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Robotics Science and Systems' href=https://scholar.google.com/scholar_lookup?title=Robotics+Science+and+Systems&author=Javdani+S.&author=Srinivasa+S.+S.&author=Bagnell+J.+A.&publication+year=2015>Google Scholar</a></div></div></div><div id="ref88" aria-flowto="reference-88-content reference-88-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 88 in the content" id="reference-88-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [88] </div> <div class="circle-list__item__grouped"><div id="reference-88-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Aarno</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Ekvall</span>, <span class="given-names">S.</span></span> and <span class="string-name"><span class="surname">Kragic</span>, <span class="given-names">D.</span></span>, “<span class="article-title">Adaptive virtual fixtures for machine-assisted teleoperation tasks</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation</em>, (<span class="year">2005</span>) pp. <span class="fpage">1139</span>–<span class="lpage">1144</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive virtual fixtures for machine-assisted teleoperation tasks'
href=https://scholar.google.com/scholar_lookup?title=Adaptive+virtual+fixtures+for+machine-assisted+teleoperation+tasks&author=Aarno+D.&author=Ekvall+S.&author=Kragic+D.&publication+year=2005>Google Scholar</a></div></div></div><div id="ref89" aria-flowto="reference-89-content reference-89-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 89 in the content" id="reference-89-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [89] </div> <div class="circle-list__item__grouped"><div id="reference-89-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Crandall</span>, <span class="given-names">J. W.</span></span> and <span class="string-name"><span class="surname">Goodrich</span>, <span class="given-names">M. A.</span></span>. <span class="article-title">Characterizing Efficiency of Human Robot Interaction: A Case Study of Shared-Control teleoperation</span>. In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems</em>, (<span class="year">2002</span>) pp. 
<span class="fpage">1290</span>–<span class="lpage">1295</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Characterizing Efficiency of Human Robot Interaction: A Case Study of Shared-Control teleoperation' href=https://scholar.google.com/scholar_lookup?title=Characterizing+Efficiency+of+Human+Robot+Interaction%3A+A+Case+Study+of+Shared-Control+teleoperation&author=Crandall+J.+W.&author=Goodrich+M.+A.&publication+year=2002>Google Scholar</a></div></div></div><div id="ref90" aria-flowto="reference-90-content reference-90-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 90 in the content" id="reference-90-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [90] </div> <div class="circle-list__item__grouped"><div id="reference-90-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Abi-Farraj</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Pacchierotti</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Giordano</span>, <span class="given-names">P. 
R.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Haptic-based shared-control methods for a dual-arm system</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">3</span>(<span class="issue">4</span>), <span class="fpage">4249</span>–<span class="lpage">4256</span> (<span class="year">2018</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for Haptic-based shared-control methods for a dual-arm system' href=https://dx.doi.org/10.1109/LRA.2018.2864353>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Haptic-based shared-control methods for a dual-arm system' href=https://scholar.google.com/scholar_lookup?title=Haptic-based+shared-control+methods+for+a+dual-arm+system&author=Selvaggio+M.&author=Abi-Farraj+F.&author=Pacchierotti+C.&author=Giordano+P.+R.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2864353&pages=4249-4256>Google Scholar</a></div></div></div><div id="ref91" aria-flowto="reference-91-content reference-91-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 91 in the content" id="reference-91-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [91] </div> <div class="circle-list__item__grouped"><div id="reference-91-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Giordano</span>, <span class="given-names">P. 
R.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>. <span class="article-title">Passive task-prioritized shared-control teleoperation with haptic guidance</span>. In: <em class="italic">International Conference on Robotics and Automation (ICRA)</em>, (<span class="year">2019</span>c) pp. <span class="fpage">430</span>–<span class="lpage">436</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Passive task-prioritized shared-control teleoperation with haptic guidance' href=https://dx.doi.org/10.1109/ICRA.2019.8794197>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passive task-prioritized shared-control teleoperation with haptic guidance' href=https://scholar.google.com/scholar_lookup?title=Passive+task-prioritized+shared-control+teleoperation+with+haptic+guidance&author=Selvaggio+M.&author=Giordano+P.+R.&author=Ficuciello+F.&author=Siciliano+B.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref92" aria-flowto="reference-92-content reference-92-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 92 in the content" id="reference-92-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [92] </div> <div class="circle-list__item__grouped"><div id="reference-92-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. 
A.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">L.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Passive virtual fixtures adaptation in minimally invasive robotic surgery</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">3</span>(<span class="issue">4</span>), <span class="fpage">3129</span>–<span class="lpage">3136</span> (<span class="year">2018</span>b).<a class='ref-link' target='_blank' aria-label='CrossRef link for Passive virtual fixtures adaptation in minimally invasive robotic surgery' href=https://dx.doi.org/10.1109/LRA.2018.2849876>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Passive virtual fixtures adaptation in minimally invasive robotic surgery' href=https://scholar.google.com/scholar_lookup?title=Passive+virtual+fixtures+adaptation+in+minimally+invasive+robotic+surgery&author=Selvaggio+M.&author=Fontanelli+G.+A.&author=Ficuciello+L.&author=Villani+F.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2849876&pages=3129-3136>Google Scholar</a></div></div></div><div id="ref93" aria-flowto="reference-93-content reference-93-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 93 in the content" id="reference-93-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [93] </div> <div class="circle-list__item__grouped"><div id="reference-93-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span 
class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Notomista</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Chen</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Gao</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Trapani</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Caldwell</span>, <span class="given-names">D.</span></span>. <span class="article-title">Enhancing bilateral teleoperation using camera-based online virtual fixtures generation</span>. In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2016</span>) pp. <span class="fpage">1483</span>–<span class="lpage">1488</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Enhancing bilateral teleoperation using camera-based online virtual fixtures generation' href=https://scholar.google.com/scholar_lookup?title=Enhancing+bilateral+teleoperation+using+camera-based+online+virtual+fixtures+generation&author=Selvaggio+M.&author=Notomista+G.&author=Chen+F.&author=Gao+B.&author=Trapani+F.&author=Caldwell+D.&publication+year=2016>Google Scholar</a></div></div></div><div id="ref94" aria-flowto="reference-94-content reference-94-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 94 in the content" id="reference-94-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [94] </div> <div class="circle-list__item__grouped"><div id="reference-94-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span 
class="given-names">M.</span></span>, <span class="string-name"><span class="surname">A.Ghalamzan</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Moccia</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>. <span class="article-title">Haptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery</span>. In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2019</span>b) pp. <span class="fpage">3617</span>–<span class="lpage">3623</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Haptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery' href=https://dx.doi.org/10.1109/IROS40897.2019.8968109>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Haptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery' href=https://scholar.google.com/scholar_lookup?title=Haptic-guided+shared+control+for+needle+grasping+optimization+in+minimally+invasive+robotic+surgery&author=Selvaggio+M.&author=A.Ghalamzan+E.&author=Moccia+R.&author=Ficuciello+F.&author=Siciliano+B.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref95" aria-flowto="reference-95-content reference-95-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 95 in the content" id="reference-95-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [95] </div> <div class="circle-list__item__grouped"><div 
id="reference-95-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Rodriguez-Guerra</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Sorrosal</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Cabanes</span>, <span class="given-names">I.</span></span> and <span class="string-name"><span class="surname">Calleja</span>, <span class="given-names">C.</span></span>, “<span class="article-title">Human-robot interaction review: Challenges and solutions for modern industrial environments</span>,” <span class="source">IEEE Access</span> <span class="volume">9</span>, <span class="fpage">108557</span>–<span class="lpage">108578</span> (<span class="year">2021</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Human-robot interaction review: Challenges and solutions for modern industrial environments' href=https://dx.doi.org/10.1109/ACCESS.2021.3099287>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Human-robot interaction review: Challenges and solutions for modern industrial environments' href=https://scholar.google.com/scholar_lookup?title=Human-robot+interaction+review%3A+Challenges+and+solutions+for+modern+industrial+environments&author=Rodriguez-Guerra+D.&author=Sorrosal+G.&author=Cabanes+I.&author=Calleja+C.&publication+year=2021&journal=IEEE+Access&volume=9&doi=10.1109%2FACCESS.2021.3099287&pages=108557-108578>Google Scholar</a></div></div></div><div id="ref96" aria-flowto="reference-96-content reference-96-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 96 in the content" id="reference-96-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [96] </div> <div 
class="circle-list__item__grouped"><div id="reference-96-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Schultheis</span>, <span class="given-names">H.</span></span> and <span class="string-name"><span class="surname">Cooper</span>, <span class="given-names">R. P.</span></span>, <span class="article-title">Everyday activities</span>, (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Everyday activities' href=https://dx.doi.org/10.1111/tops.12603>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Everyday activities' href=https://scholar.google.com/scholar_lookup?title=Everyday+activities&author=Schultheis+H.&author=Cooper+R.+P.&publication+year=2022>Google Scholar</a></div></div></div><div id="ref97" aria-flowto="reference-97-content reference-97-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 97 in the content" id="reference-97-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [97] </div> <div class="circle-list__item__grouped"><div id="reference-97-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Beetz</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Beßler</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Haidu</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Pomarlan</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Bozcuoğlu</span>, <span class="given-names">A. 
K.</span></span> and <span class="string-name"><span class="surname">Bartels</span>, <span class="given-names">G.</span></span>, “<span class="article-title">Know rob 2.0–a 2nd Generation Knowledge Processing Framework for Cognition-Enabled Robotic Agents</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation (ICRA) 2018</em>, <span class="publisher-name">IEEE</span> (<span class="year">2018</span>) pp. <span class="fpage">512</span>–<span class="lpage">519</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Know rob 2.0–a 2nd Generation Knowledge Processing Framework for Cognition-Enabled Robotic Agents' href=https://scholar.google.com/scholar_lookup?title=Know+rob+2.0%E2%80%93a+2nd+Generation+Knowledge+Processing+Framework+for+Cognition-Enabled+Robotic+Agents&author=Beetz+M.&author=Be%C3%9Fler+D.&author=Haidu+A.&author=Pomarlan+M.&author=Bozcuo%C4%9Flu+A.+K.&author=Bartels+G.&publication+year=2018>Google Scholar</a></div></div></div><div id="ref98" aria-flowto="reference-98-content reference-98-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 98 in the content" id="reference-98-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [98] </div> <div class="circle-list__item__grouped"><div id="reference-98-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lemaignan</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Warnier</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Sisbot</span>, <span class="given-names">E. 
A.</span></span>, <span class="string-name"><span class="surname">Clodic</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Alami</span>, <span class="given-names">R.</span></span>, “<span class="article-title">Artificial cognition for social human–robot interaction: An implementation</span>,” <span class="source">Artif Intell</span> <span class="volume">247</span>, <span class="fpage">45</span>–<span class="lpage">69</span> (<span class="year">2017</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Artificial cognition for social human–robot interaction: An implementation' href=https://dx.doi.org/10.1016/j.artint.2016.07.002>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Artificial cognition for social human–robot interaction: An implementation' href=https://scholar.google.com/scholar_lookup?title=Artificial+cognition+for+social+human%E2%80%93robot+interaction%3A+An+implementation&author=Lemaignan+S.&author=Warnier+M.&author=Sisbot+E.+A.&author=Clodic+A.&author=Alami+R.&publication+year=2017&journal=Artif+Intell&volume=247&doi=10.1016%2Fj.artint.2016.07.002&pages=45-69>Google Scholar</a></div></div></div><div id="ref99" aria-flowto="reference-99-content reference-99-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 99 in the content" id="reference-99-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [99] </div> <div class="circle-list__item__grouped"><div id="reference-99-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Beßler</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Porzel</span>, <span class="given-names">R.</span></span>, <span 
class="string-name"><span class="surname">Pomarlan</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Beetz</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Malaka</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Bateman</span>, <span class="given-names">J.</span></span>, “<span class="chapter-title">A Formal Model of Affordances for Flexible Robotic Task Execution</span>,” In: <span class="source">ECAI</span>, (<span class="publisher-name">IOS Press</span>, <span class="year">2020</span>) pp. <span class="fpage">2425</span>–<span class="lpage">2432</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for ECAI' href=https://scholar.google.com/scholar_lookup?title=ECAI&author=Be%C3%9Fler+D.&author=Porzel+R.&author=Pomarlan+M.&author=Beetz+M.&author=Malaka+R.&author=Bateman+J.&publication+year=2020&pages=2425-2432>Google Scholar</a></div></div></div><div id="ref100" aria-flowto="reference-100-content reference-100-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 100 in the content" id="reference-100-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [100] </div> <div class="circle-list__item__grouped"><div id="reference-100-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">de la Cruz</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Piater</span>, <span class="given-names">J.</span></span> and <span class="string-name"><span class="surname">Saveriano</span>, <span class="given-names">M.</span></span>, “<span class="article-title">Reconfigurable Behavior Trees: Towards an Executive framework meeting 
high-level decision making and control layer features</span>,” In: <em class="italic">IEEE International Conference on Systems, Man, and Cybernetics (SMC) 2020</em>, (<span class="publisher-name">IEEE</span>, <span class="year">2020</span>) pp. <span class="fpage">1915</span>–<span class="lpage">1922</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Reconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features' href=https://dx.doi.org/10.1109/SMC42975.2020.9282817>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Reconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features' href=https://scholar.google.com/scholar_lookup?title=Reconfigurable+Behavior+Trees%3A+Towards+an+Executive+framework+meeting+high-level+decision+making+and+control+layer+features&author=de+la+Cruz+P.&author=Piater+J.&author=Saveriano+M.&publication+year=2020>Google Scholar</a></div></div></div><div id="ref101" aria-flowto="reference-101-content reference-101-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 101 in the content" id="reference-101-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [101] </div> <div class="circle-list__item__grouped"><div id="reference-101-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Carbone</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Orlandini</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Pirri</span>, <span 
class="given-names">F.</span></span>, “<span class="article-title">Model-based control architecture for attentive robots in rescue scenarios</span>,” <span class="source">Auton Robot</span> <span class="volume">24</span>(<span class="issue">1</span>), <span class="fpage">87</span>–<span class="lpage">120</span> (<span class="year">2008</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Model-based control architecture for attentive robots in rescue scenarios' href=https://dx.doi.org/10.1007/s10514-007-9055-6>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Model-based control architecture for attentive robots in rescue scenarios' href=https://scholar.google.com/scholar_lookup?title=Model-based+control+architecture+for+attentive+robots+in+rescue+scenarios&author=Carbone+A.&author=Finzi+A.&author=Orlandini+A.&author=Pirri+F.&publication+year=2008&journal=Auton+Robot&volume=24&doi=10.1007%2Fs10514-007-9055-6&pages=87-120>Google Scholar</a></div></div></div><div id="ref102" aria-flowto="reference-102-content reference-102-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 102 in the content" id="reference-102-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [102] </div> <div class="circle-list__item__grouped"><div id="reference-102-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Karpas</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Levine</span>, <span class="given-names">S. J.</span></span>, <span class="string-name"><span class="surname">Yu</span>, <span class="given-names">P.</span></span> and <span class="string-name"><span class="surname">Williams</span>, <span class="given-names">B. 
C.</span></span>, “<span class="chapter-title">Robust Execution of Plans for Human-Robot Teams</span>,” In: <span class="source">ICAPS-2015</span>, (<span class="year">2015</span>) pp. <span class="fpage">342</span>–<span class="lpage">346</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for ICAPS-2015' href=https://scholar.google.com/scholar_lookup?title=ICAPS-2015&author=Karpas+E.&author=Levine+S.+J.&author=Yu+P.&author=Williams+B.+C.&publication+year=2015&pages=342-346>Google Scholar</a></div></div></div><div id="ref103" aria-flowto="reference-103-content reference-103-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 103 in the content" id="reference-103-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [103] </div> <div class="circle-list__item__grouped"><div id="reference-103-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Botvinick</span>, <span class="given-names">M. M.</span></span>, <span class="string-name"><span class="surname">Braver</span>, <span class="given-names">T. S.</span></span>, <span class="string-name"><span class="surname">Barch</span>, <span class="given-names">D. M.</span></span>, <span class="string-name"><span class="surname">Carter</span>, <span class="given-names">C. S.</span></span> and <span class="string-name"><span class="surname">Cohen</span>, <span class="given-names">J. 
D.</span></span>, “<span class="article-title">Conflict monitoring and cognitive control</span>,” <span class="source">Psychol Rev</span> <span class="volume">108</span>(<span class="issue">3</span>), <span class="fpage">624</span>–<span class="lpage">652</span> (<span class="year">2001</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Conflict monitoring and cognitive control' href=https://dx.doi.org/10.1037/0033-295X.108.3.624>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Conflict monitoring and cognitive control' href=https://scholar.google.com/scholar_lookup?title=Conflict+monitoring+and+cognitive+control&author=Botvinick+M.+M.&author=Braver+T.+S.&author=Barch+D.+M.&author=Carter+C.+S.&author=Cohen+J.+D.&publication+year=2001&journal=Psychol+Rev&volume=108&doi=10.1037%2F0033-295X.108.3.624&pages=624-652>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for Conflict monitoring and cognitive control' href=https://www.ncbi.nlm.nih.gov/pubmed/11488380>PubMed</a></div></div></div><div id="ref104" aria-flowto="reference-104-content reference-104-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><!----></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [104] </div> <div class="circle-list__item__grouped"><div id="reference-104-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cooper</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Shallice</span>, <span class="given-names">T.</span></span>, “<span class="article-title">Contention scheduling and the control of routine activities</span>,” <span class="source">Cogn Neuropsychol</span> <span class="volume">17</span>(<span class="issue">4</span>), <span class="fpage">297</span>–<span class="lpage">338</span> (<span class="year">2000</span>).<a 
class='ref-link' target='_blank' aria-label='CrossRef link for Contention scheduling and the control of routine activities' href=https://dx.doi.org/10.1080/026432900380427>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Contention scheduling and the control of routine activities' href=https://scholar.google.com/scholar_lookup?title=Contention+scheduling+and+the+control+of+routine+activities&author=Cooper+R.&author=Shallice+T.&publication+year=2000&journal=Cogn+Neuropsychol&volume=17&doi=10.1080%2F026432900380427&pages=297-338>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for Contention scheduling and the control of routine activities' href=https://www.ncbi.nlm.nih.gov/pubmed/20945185>PubMed</a></div></div></div><div id="ref105" aria-flowto="reference-105-content reference-105-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 105 in the content" id="reference-105-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [105] </div> <div class="circle-list__item__grouped"><div id="reference-105-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cooper</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Shallice</span>, <span class="given-names">T.</span></span>, “<span class="article-title">Hierarchical schemas and goals in the control of sequential behavior</span>,” <span class="source">Psychol Rev</span> <span class="volume">113</span>(<span class="issue">4</span>), <span class="fpage">887</span>–<span class="lpage">916</span> (<span class="year">2006</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Hierarchical schemas and goals in the control of sequential behavior' 
href=https://dx.doi.org/10.1037/0033-295X.113.4.887>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Hierarchical schemas and goals in the control of sequential behavior' href=https://scholar.google.com/scholar_lookup?title=Hierarchical+schemas+and+goals+in+the+control+of+sequential+behavior&author=Cooper+R.&author=Shallice+T.&publication+year=2006&journal=Psychol+Rev&volume=113&doi=10.1037%2F0033-295X.113.4.887&pages=887-916>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for Hierarchical schemas and goals in the control of sequential behavior' href=https://www.ncbi.nlm.nih.gov/pubmed/17014307>PubMed</a></div></div></div><div id="ref106" aria-flowto="reference-106-content reference-106-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 106 in the content" id="reference-106-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [106] </div> <div class="circle-list__item__grouped"><div id="reference-106-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Petrík</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Tapaswi</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Laptev</span>, <span class="given-names">I.</span></span> and <span class="string-name"><span class="surname">Sivic</span>, <span class="given-names">J.</span></span>, “<span class="article-title">Learning Object Manipulation Skills via Approximate State Estimation from Real Videos</span>,” In: <em class="italic">Conference on Robot Learning</em>, (<span class="publisher-name">PMLR</span>, <span class="year">2021</span>) pp. 
<span class="fpage">296</span>–<span class="lpage">312</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Learning Object Manipulation Skills via Approximate State Estimation from Real Videos' href=https://dx.doi.org/10.1109/IROS47612.2022.9982084>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Learning Object Manipulation Skills via Approximate State Estimation from Real Videos' href=https://scholar.google.com/scholar_lookup?title=Learning+Object+Manipulation+Skills+via+Approximate+State+Estimation+from+Real+Videos&author=Petr%C3%ADk+V.&author=Tapaswi+M.&author=Laptev+I.&author=Sivic+J.&publication+year=2021>Google Scholar</a></div></div></div><div id="ref107" aria-flowto="reference-107-content reference-107-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 107 in the content" id="reference-107-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [107] </div> <div class="circle-list__item__grouped"><div id="reference-107-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ramirez-Amaro</span>, <span class="given-names">K.</span></span>, <span class="string-name"><span class="surname">Yang</span>, <span class="given-names">Y.</span></span> and <span class="string-name"><span class="surname">Cheng</span>, <span class="given-names">G.</span></span>, “<span class="article-title">A Survey on Semantic-Based Methods for the Understanding of Human Movements</span>,” <span class="source">Robot Auton Syst</span> <span class="volume">119</span>, <span class="fpage">31</span>–<span class="lpage">50</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A Survey on Semantic-Based Methods for the Understanding of Human Movements' 
href=https://dx.doi.org/10.1016/j.robot.2019.05.013>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A Survey on Semantic-Based Methods for the Understanding of Human Movements' href=https://scholar.google.com/scholar_lookup?title=A+Survey+on+Semantic-Based+Methods+for+the+Understanding+of+Human+Movements&author=Ramirez-Amaro+K.&author=Yang+Y.&author=Cheng+G.&publication+year=2019&journal=Robot+Auton+Syst&volume=119&doi=10.1016%2Fj.robot.2019.05.013&pages=31-50>Google Scholar</a></div></div></div><div id="ref108" aria-flowto="reference-108-content reference-108-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 108 in the content" id="reference-108-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [108] </div> <div class="circle-list__item__grouped"><div id="reference-108-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Mansouri</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Pecora</span>, <span class="given-names">F.</span></span> and <span class="string-name"><span class="surname">Schüller</span>, <span class="given-names">P.</span></span>, “<span class="article-title">Combining task and motion planning: Challenges and guidelines</span>,” <span class="source">Front Robot AI</span> <span class="volume">8</span>, <span class="fpage">637888</span> (<span class="year">2021</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Combining task and motion planning: Challenges and guidelines' href=https://dx.doi.org/10.3389/frobt.2021.637888>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Combining task and motion planning: Challenges and guidelines' 
href=https://scholar.google.com/scholar_lookup?title=Combining+task+and+motion+planning%3A+Challenges+and+guidelines&author=Mansouri+M.&author=Pecora+F.&author=Sch%C3%BCller+P.&publication+year=2021&journal=Front+Robot+AI&volume=8&doi=10.3389%2Ffrobt.2021.637888>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for Combining task and motion planning: Challenges and guidelines' href=https://www.ncbi.nlm.nih.gov/pubmed/34095239>PubMed</a></div></div></div><div id="ref109" aria-flowto="reference-109-content reference-109-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 109 in the content" id="reference-109-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [109] </div> <div class="circle-list__item__grouped"><div id="reference-109-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, “<span class="article-title">Attentional Multimodal Interface for Multidrone Search in the Alps</span>,” In: <em class="italic">IEEE international conference on systems, man, and cybernetics (SMC)</em>, <span class="publisher-name">IEEE</span> (<span class="year">2016</span>) pp. 
<span class="fpage">001178</span>–<span class="lpage">001183</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Attentional Multimodal Interface for Multidrone Search in the Alps' href=https://scholar.google.com/scholar_lookup?title=Attentional+Multimodal+Interface+for+Multidrone+Search+in+the+Alps&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Lippiello+V.&publication+year=2016>Google Scholar</a></div></div></div><div id="ref110" aria-flowto="reference-110-content reference-110-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 110 in the content" id="reference-110-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [110] </div> <div class="circle-list__item__grouped"><div id="reference-110-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Cacace</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Fiore</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Alami</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Attentional Supervision of Human-Robot Collaborative Plans</span>,” In: <em class="italic">25th IEEE International Symposium on Robot and Human Interactive Communication (RO_MAN)</em>, <span class="publisher-name">IEEE</span> (<span class="year">2016</span>) pp. 
<span class="fpage">867</span>–<span class="lpage">873</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Attentional Supervision of Human-Robot Collaborative Plans' href=https://scholar.google.com/scholar_lookup?title=Attentional+Supervision+of+Human-Robot+Collaborative+Plans&author=Caccavale+R.&author=Cacace+J.&author=Fiore+M.&author=Alami+R.&author=Finzi+A.&publication+year=2016>Google Scholar</a></div></div></div><div id="ref111" aria-flowto="reference-111-content reference-111-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 111 in the content" id="reference-111-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [111] </div> <div class="circle-list__item__grouped"><div id="reference-111-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Plan Execution and Attentional Regulations for Flexible Human-Robot Interaction</span>,” In: <em class="italic">IEEE International Conference on Systems, Man, and Cybernetics 2015</em>, <span class="publisher-name">IEEE</span> (<span class="year">2015</span>) pp. 
<span class="fpage">2453</span>–<span class="lpage">2458</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Plan Execution and Attentional Regulations for Flexible Human-Robot Interaction' href=https://scholar.google.com/scholar_lookup?title=Plan+Execution+and+Attentional+Regulations+for+Flexible+Human-Robot+Interaction&author=Caccavale+R.&author=Finzi+A.&publication+year=2015>Google Scholar</a></div></div></div><div id="ref112" aria-flowto="reference-112-content reference-112-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 112 in the content" id="reference-112-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [112] </div> <div class="circle-list__item__grouped"><div id="reference-112-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Flexible task execution and attentional regulations in human-robot interaction</span>,” <span class="source">IEEE Trans Cogn Develp Syst</span> <span class="volume">9</span>(<span class="issue">1</span>), <span class="fpage">68</span>–<span class="lpage">79</span> (<span class="year">2016</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Flexible task execution and attentional regulations in human-robot interaction' href=https://dx.doi.org/10.1109/TCDS.2016.2614690>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Flexible task execution and attentional regulations in human-robot interaction' 
href=https://scholar.google.com/scholar_lookup?title=Flexible+task+execution+and+attentional+regulations+in+human-robot+interaction&author=Caccavale+R.&author=Finzi+A.&publication+year=2016&journal=IEEE+Trans+Cogn+Develp+Syst&volume=9&doi=10.1109%2FTCDS.2016.2614690&pages=68-79>Google Scholar</a></div></div></div><div id="ref113" aria-flowto="reference-113-content reference-113-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 113 in the content" id="reference-113-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [113] </div> <div class="circle-list__item__grouped"><div id="reference-113-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="chapter-title">Toward a Cognitive Control Framework for Explainable Robotics</span>,” In: <span class="source">Human-Friendly Robotics 2020: 13th International Workshop</span>, (<span class="publisher-name">Springer</span>, <span class="year">2021</span>)<span class="fpage">46</span>–<span class="lpage">58</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Human-Friendly Robotics 2020: 13th International Workshop' href=https://dx.doi.org/10.1007/978-3-030-71356-0_4>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Human-Friendly Robotics 2020: 13th International Workshop' href=https://scholar.google.com/scholar_lookup?title=Human-Friendly+Robotics+2020%3A+13th+International+Workshop&author=Caccavale+R.&author=Finzi+A.&publication+year=2021&pages=46-58>Google Scholar</a></div></div></div><div id="ref114" aria-flowto="reference-114-content 
reference-114-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 114 in the content" id="reference-114-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [114] </div> <div class="circle-list__item__grouped"><div id="reference-114-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">A robotic cognitive control framework for collaborative task execution and learning</span>,” <span class="source">Top Cogn Sci</span> <span class="volume">14</span>(<span class="issue">2</span>), <span class="fpage">327</span>–<span class="lpage">343</span>(<span class="year">2022</span>b).<a class='ref-link' target='_blank' aria-label='CrossRef link for A robotic cognitive control framework for collaborative task execution and learning' href=https://dx.doi.org/10.1111/tops.12587>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A robotic cognitive control framework for collaborative task execution and learning' href=https://scholar.google.com/scholar_lookup?title=A+robotic+cognitive+control+framework+for+collaborative+task+execution+and+learning&author=Caccavale+R.&author=Finzi+A.&publication+year=2022&journal=Top+Cogn+Sci&volume=14&doi=10.1111%2Ftops.12587&pages=327-343>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for A robotic cognitive control framework for collaborative task execution and learning' href=https://www.ncbi.nlm.nih.gov/pubmed/34826350>PubMed</a></div></div></div><div id="ref115" aria-flowto="reference-115-content reference-115-button" 
class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 115 in the content" id="reference-115-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [115] </div> <div class="circle-list__item__grouped"><div id="reference-115-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Leone</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Lucignano</span>, <span class="given-names">L.</span></span>, <span class="string-name"><span class="surname">Rossi</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Staffa</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Attentional Regulations in a Situated Human-Robot Dialogue</span>,” In: <em class="italic">The 23rd IEEE International Symposium on Robot and Human Interactive Communication</em>, <span class="publisher-name">IEEE</span> (<span class="year">2014</span>) pp. 
<span class="fpage">844</span>–<span class="lpage">849</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Attentional Regulations in a Situated Human-Robot Dialogue' href=https://scholar.google.com/scholar_lookup?title=Attentional+Regulations+in+a+Situated+Human-Robot+Dialogue&author=Caccavale+R.&author=Leone+E.&author=Lucignano+L.&author=Rossi+S.&author=Staffa+M.&author=Finzi+A.&publication+year=2014>Google Scholar</a></div></div></div><div id="ref116" aria-flowto="reference-116-content reference-116-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 116 in the content" id="reference-116-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [116] </div> <div class="circle-list__item__grouped"><div id="reference-116-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Learning attentional regulations for structured tasks execution in robotic cognitive control</span>,” <span class="source">Auton Robot</span> <span class="volume">43</span>, <span class="fpage">2229</span>–<span class="lpage">2243</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Learning attentional regulations for structured tasks execution in robotic cognitive control' href=https://dx.doi.org/10.1007/s10514-019-09876-x>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Learning attentional regulations for structured tasks execution in robotic cognitive control' 
href=https://scholar.google.com/scholar_lookup?title=Learning+attentional+regulations+for+structured+tasks+execution+in+robotic+cognitive+control&author=Caccavale+R.&author=Finzi+A.&publication+year=2019&journal=Auton+Robot&volume=43&doi=10.1007%2Fs10514-019-09876-x&pages=2229-2243>Google Scholar</a></div></div></div><div id="ref117" aria-flowto="reference-117-content reference-117-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 117 in the content" id="reference-117-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [117] </div> <div class="circle-list__item__grouped"><div id="reference-117-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Saveriano</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Lee</span>, <span class="given-names">D.</span></span>, “<span class="article-title">Kinesthetic teaching and attentional supervision of structured tasks in human–robot interaction</span>,” <span class="source">Auton Robot</span> <span class="volume">43</span>(<span class="issue">6</span>), <span class="fpage">1291</span>–<span class="lpage">1307</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Kinesthetic teaching and attentional supervision of structured tasks in human–robot interaction' href=https://dx.doi.org/10.1007/s10514-018-9706-9>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Kinesthetic teaching and attentional supervision of structured tasks in human–robot 
interaction' href=https://scholar.google.com/scholar_lookup?title=Kinesthetic+teaching+and+attentional+supervision+of+structured+tasks+in+human%E2%80%93robot+interaction&author=Caccavale+R.&author=Saveriano+M.&author=Finzi+A.&author=Lee+D.&publication+year=2019&journal=Auton+Robot&volume=43&doi=10.1007%2Fs10514-018-9706-9&pages=1291-1307>Google Scholar</a></div></div></div><div id="ref118" aria-flowto="reference-118-content reference-118-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 118 in the content" id="reference-118-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [118] </div> <div class="circle-list__item__grouped"><div id="reference-118-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Saveriano</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Lee</span>, <span class="given-names">D.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Imitation Learning and Attentional Supervision of Dual-Arm Structured Tasks</span>,” In: <em class="italic">Joint IEEE International Conference on Development and Learning and Epigenetic Robotics (ICDL-EpiRob)</em>, <span class="publisher-name">IEEE</span> (<span class="year">2017</span>) pp. 
<span class="fpage">66</span>–<span class="lpage">71</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Imitation Learning and Attentional Supervision of Dual-Arm Structured Tasks' href=https://scholar.google.com/scholar_lookup?title=Imitation+Learning+and+Attentional+Supervision+of+Dual-Arm+Structured+Tasks&author=Caccavale+R.&author=Saveriano+M.&author=Fontanelli+G.+A.&author=Ficuciello+F.&author=Lee+D.&author=Finzi+A.&publication+year=2017>Google Scholar</a></div></div></div><div id="ref119" aria-flowto="reference-119-content reference-119-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 119 in the content" id="reference-119-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [119] </div> <div class="circle-list__item__grouped"><div id="reference-119-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Ermini</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Fedeli</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Tavano</span>, <span class="given-names">F.</span></span>, “<span class="article-title">A multi-robot deep Q-learning framework for priority-based sanitization of railway stations</span>,” <span class="source">Appl Intell</span> <span class="volume">53</span>, <span class="fpage">20595</span>–<span class="lpage">20613</span> (<span class="year">2023</span>a)<a 
class='ref-link' target='_blank' aria-label='CrossRef link for A multi-robot deep Q-learning framework for priority-based sanitization of railway stations' href=https://dx.doi.org/10.1007/s10489-023-04529-0>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A multi-robot deep Q-learning framework for priority-based sanitization of railway stations' href=https://scholar.google.com/scholar_lookup?title=A+multi-robot+deep+Q-learning+framework+for+priority-based+sanitization+of+railway+stations&author=Caccavale+R.&author=Ermini+M.&author=Fedeli+E.&author=Finzi+A.&author=Lippiello+V.&author=Tavano+F.&publication+year=2023&journal=Appl+Intell&volume=53&doi=10.1007%2Fs10489-023-04529-0&pages=20595-20613>Google Scholar</a></div></div></div><div id="ref120" aria-flowto="reference-120-content reference-120-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 120 in the content" id="reference-120-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [120] </div> <div class="circle-list__item__grouped"><div id="reference-120-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Ermini</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Fedeli</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span> and <span class="string-name"><span class="surname">Tavano</span>, <span class="given-names">F.</span></span>, “<span class="chapter-title">Toward a 
Heterogeneous Multi-Robot Framework for Priority-Based Sanitization of Railway Stations</span>,” In: <span class="source">AIxIA 2022–Advances in Artificial Intelligence: XXIst International Conference of the Italian Association for Artificial Intelligence, AIxIA 2022</span>. vol. <span class="volume">2023b</span> (<span class="publisher-name">Springer</span>, <span class="publisher-loc">Udine, Italy</span>, <span class="year">2022</span>) pp. <span class="fpage">387</span>–<span class="lpage">401</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for AIxIA 2022–Advances in Artificial Intelligence: XXIst International Conference of the Italian Association for Artificial Intelligence, AIxIA 2022' href=https://scholar.google.com/scholar_lookup?title=AIxIA+2022%E2%80%93Advances+in+Artificial+Intelligence%3A+XXIst+International+Conference+of+the+Italian+Association+for+Artificial+Intelligence%2C+AIxIA+2022&author=Caccavale+R.&author=Ermini+M.&author=Fedeli+E.&author=Finzi+A.&author=Lippiello+V.&author=Tavano+F.&publication+year=2022&pages=387-401>Google Scholar</a></div></div></div><div id="ref121" aria-flowto="reference-121-content reference-121-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 121 in the content" id="reference-121-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [121] </div> <div class="circle-list__item__grouped"><div id="reference-121-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Finzi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">A rapidly-exploring random trees approach to combined task and motion planning</span>,” <span 
class="source">Robot Auton Syst</span> <span class="volume">157</span>, <span class="fpage">104238</span> (<span class="year">2022</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for A rapidly-exploring random trees approach to combined task and motion planning' href=https://dx.doi.org/10.1016/j.robot.2022.104238>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A rapidly-exploring random trees approach to combined task and motion planning' href=https://scholar.google.com/scholar_lookup?title=A+rapidly-exploring+random+trees+approach+to+combined+task+and+motion+planning&author=Caccavale+R.&author=Finzi+A.&publication+year=2022&journal=Robot+Auton+Syst&volume=157&doi=10.1016%2Fj.robot.2022.104238>Google Scholar</a></div></div></div><div id="ref122" aria-flowto="reference-122-content reference-122-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 122 in the content" id="reference-122-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [122] </div> <div class="circle-list__item__grouped"><div id="reference-122-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Norman</span>, <span class="given-names">D. 
A.</span></span> and <span class="string-name"><span class="surname">Shallice</span>, <span class="given-names">T.</span></span>, <span class="article-title">Attention to action: Willed and automatic control of behavior</span>, <span class="volume">4</span> (<span class="year">1986</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Attention to action: Willed and automatic control of behavior' href=https://dx.doi.org/10.1007/978-1-4757-0629-1_1>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Attention to action: Willed and automatic control of behavior' href=https://scholar.google.com/scholar_lookup?title=Attention+to+action%3A+Willed+and+automatic+control+of+behavior&author=Norman+D.+A.&author=Shallice+T.&publication+year=1986>Google Scholar</a></div></div></div><div id="ref123" aria-flowto="reference-123-content reference-123-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 123 in the content" id="reference-123-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [123] </div> <div class="circle-list__item__grouped"><div id="reference-123-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Echelmeyer</span>, <span class="given-names">W.</span></span>, <span class="string-name"><span class="surname">Kirchheim</span>, <span class="given-names">A.</span></span> and <span class="string-name"><span class="surname">Wellbrock</span>, <span class="given-names">E.</span></span>, “<span class="article-title">Robotics-Logistics: Challenges for Automation of Logistic Processes</span>,” In: <em class="italic">IEEE International Conference on Automation and Logistics 2008</em>, (<span class="publisher-name">IEEE</span>, <span class="year">2008</span>) pp. 
<span class="fpage">2099</span>–<span class="lpage">2103</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Robotics-Logistics: Challenges for Automation of Logistic Processes' href=https://dx.doi.org/10.1109/ICAL.2008.4636510>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Robotics-Logistics: Challenges for Automation of Logistic Processes' href=https://scholar.google.com/scholar_lookup?title=Robotics-Logistics%3A+Challenges+for+Automation+of+Logistic+Processes&author=Echelmeyer+W.&author=Kirchheim+A.&author=Wellbrock+E.&publication+year=2008>Google Scholar</a></div></div></div><div id="ref124" aria-flowto="reference-124-content reference-124-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 124 in the content" id="reference-124-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [124] </div> <div class="circle-list__item__grouped"><div id="reference-124-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Sakamoto</span>, <span class="given-names">T.</span></span>, <span class="string-name"><span class="surname">Harada</span>, <span class="given-names">K.</span></span> and <span class="string-name"><span class="surname">Wan</span>, <span class="given-names">W.</span></span>, “<span class="article-title">Real-time planning robotic palletizing tasks using reusable roadmaps</span>,” <span class="source">J Robot, Network Art Life</span> <span class="volume">6</span>(<span class="issue">4</span>), <span class="fpage">240</span>–<span class="lpage">245</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Real-time planning robotic palletizing tasks using reusable roadmaps' 
href=https://dx.doi.org/10.2991/jrnal.k.200222.009>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Real-time planning robotic palletizing tasks using reusable roadmaps' href=https://scholar.google.com/scholar_lookup?title=Real-time+planning+robotic+palletizing+tasks+using+reusable+roadmaps&author=Sakamoto+T.&author=Harada+K.&author=Wan+W.&publication+year=2020&journal=J+Robot%2C+Network+Art+Life&volume=6&doi=10.2991%2Fjrnal.k.200222.009&pages=240-245>Google Scholar</a></div></div></div><div id="ref125" aria-flowto="reference-125-content reference-125-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 125 in the content" id="reference-125-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [125] </div> <div class="circle-list__item__grouped"><div id="reference-125-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Jocas</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Kurrek</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Zoghlami</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Gianni</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Salehi</span>, <span class="given-names">V.</span></span>. <span class="article-title">Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot</span>. In: <em class="italic">Proceedings of the Design Society: International Conference on Engineering Design</em>, (<span class="publisher-name">Cambridge University Press</span>, <span class="year">2019</span>) pp. 
<span class="fpage">2041</span>–<span class="lpage">2050</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot' href=https://dx.doi.org/10.1017/dsi.2019.210>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot' href=https://scholar.google.com/scholar_lookup?title=Ai-Based+Learning+Approach+with+Consideration+of+Safety+Criteria+on+Example+of+a+Depalletization+Robot&author=Jocas+M.&author=Kurrek+P.&author=Zoghlami+F.&author=Gianni+M.&author=Salehi+V.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref126" aria-flowto="reference-126-content reference-126-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 126 in the content" id="reference-126-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [126] </div> <div class="circle-list__item__grouped"><div id="reference-126-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Nakamoto</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Eto</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Sonoura</span>, <span class="given-names">T.</span></span>, <span class="string-name"><span class="surname">Tanaka</span>, <span class="given-names">J.</span></span> and <span class="string-name"><span class="surname">Ogawa</span>, <span class="given-names">A.</span></span>. <span class="article-title">High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly</span>. 
In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="publisher-name">IEEE</span>, <span class="year">2016</span>) pp. <span class="fpage">344</span>–<span class="lpage">349</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly' href=https://dx.doi.org/10.1109/IROS.2016.7759077>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly' href=https://scholar.google.com/scholar_lookup?title=High-Speed+and+Compact+Depalletizing+Robot+Capable+of+Handling+Packages+Stacked+Complicatedly&author=Nakamoto+H.&author=Eto+H.&author=Sonoura+T.&author=Tanaka+J.&author=Ogawa+A.&publication+year=2016>Google Scholar</a></div></div></div><div id="ref127" aria-flowto="reference-127-content reference-127-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 127 in the content" id="reference-127-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [127] </div> <div class="circle-list__item__grouped"><div id="reference-127-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Schwarz</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Milan</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Periyasamy</span>, <span class="given-names">A. 
S.</span></span> and <span class="string-name"><span class="surname">Behnke</span>, <span class="given-names">S.</span></span>, “<span class="article-title">Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter</span>,” <span class="source">Int J Robot Res</span> <span class="volume">37</span>(<span class="issue">4-5</span>), <span class="fpage">437</span>–<span class="lpage">451</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter' href=https://dx.doi.org/10.1177/0278364917713117>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter' href=https://scholar.google.com/scholar_lookup?title=Rgb-d+object+detection+and+semantic+segmentation+for+autonomous+manipulation+in+clutter&author=Schwarz+M.&author=Milan+A.&author=Periyasamy+A.+S.&author=Behnke+S.&publication+year=2018&journal=Int+J+Robot+Res&volume=37&doi=10.1177%2F0278364917713117&pages=437-451>Google Scholar</a></div></div></div><div id="ref128" aria-flowto="reference-128-content reference-128-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 128 in the content" id="reference-128-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [128] </div> <div class="circle-list__item__grouped"><div id="reference-128-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Katsoulas</span>, <span class="given-names">D.</span></span> and <span class="string-name"><span class="surname">Kosmopoulos</span>, <span class="given-names">D.</span></span>, “<span class="article-title">An Efficient 
Depalletizing System Based on 2d Range Imagery</span>,” In: <em class="italic">Proceedings 2001 ICRA. IEEE International Conference on Robotics and Automation (Cat. No.01CH37164)</em>, (<span class="year">2001</span>) pp. <span class="fpage">305</span>–<span class="lpage">312</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for An Efficient Depalletizing System Based on 2d Range Imagery' href=https://scholar.google.com/scholar_lookup?title=An+Efficient+Depalletizing+System+Based+on+2d+Range+Imagery&author=Katsoulas+D.&author=Kosmopoulos+D.&publication+year=2001>Google Scholar</a></div></div></div><div id="ref129" aria-flowto="reference-129-content reference-129-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 129 in the content" id="reference-129-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [129] </div> <div class="circle-list__item__grouped"><div id="reference-129-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Krug</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Stoyanov</span>, <span class="given-names">T.</span></span>, <span class="string-name"><span class="surname">Tincani</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Andreasson</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Mosberger</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Fantoni</span>, <span class="given-names">G.</span></span> and <span class="string-name"><span class="surname">Lilienthal</span>, <span class="given-names">A. 
J.</span></span>, “<span class="article-title">The next step in robot commissioning: Autonomous picking and palletizing</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">1</span>(<span class="issue">1</span>), <span class="fpage">546</span>–<span class="lpage">553</span> (<span class="year">2016</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for The next step in robot commissioning: Autonomous picking and palletizing' href=https://dx.doi.org/10.1109/LRA.2016.2519944>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for The next step in robot commissioning: Autonomous picking and palletizing' href=https://scholar.google.com/scholar_lookup?title=The+next+step+in+robot+commissioning%3A+Autonomous+picking+and+palletizing&author=Krug+R.&author=Stoyanov+T.&author=Tincani+V.&author=Andreasson+H.&author=Mosberger+R.&author=Fantoni+G.&author=Lilienthal+A.+J.&publication+year=2016&journal=IEEE+Robot+Auto+Lett&volume=1&doi=10.1109%2FLRA.2016.2519944&pages=546-553>Google Scholar</a></div></div></div><div id="ref130" aria-flowto="reference-130-content reference-130-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 130 in the content" id="reference-130-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [130] </div> <div class="circle-list__item__grouped"><div id="reference-130-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Tanaka</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Ogawa</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Nakamoto</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span 
class="surname">Sonoura</span>, <span class="given-names">T.</span></span> and <span class="string-name"><span class="surname">Eto</span>, <span class="given-names">H.</span></span>, “<span class="article-title">Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots</span>,” <span class="source">ROBOMECH Journal</span> <span class="volume">7</span>(<span class="issue">1</span>), <span class="fpage">2</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots' href=https://dx.doi.org/10.1186/s40648-019-0151-0>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots' href=https://scholar.google.com/scholar_lookup?title=Suction+pad+unit+using+a+bellows+pneumatic+actuator+as+a+support+mechanism+for+an+end+effector+of+depalletizing+robots&author=Tanaka+J.&author=Ogawa+A.&author=Nakamoto+H.&author=Sonoura+T.&author=Eto+H.&publication+year=2020&journal=ROBOMECH+Journal&volume=7&doi=10.1186%2Fs40648-019-0151-0>Google Scholar</a></div></div></div><div id="ref131" aria-flowto="reference-131-content reference-131-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 131 in the content" id="reference-131-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [131] </div> <div class="circle-list__item__grouped"><div id="reference-131-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Moura</span>, <span class="given-names">F. 
M.</span></span> and <span class="string-name"><span class="surname">Silva</span>, <span class="given-names">M. F.</span></span>. <span class="article-title">Application for Automatic Programming of Palletizing Robots</span>. In: <em class="italic">IEEE International Conference on Autonomous Robot Systems and Competitions (ICARSC) 2018</em>, (<span class="publisher-name">IEEE</span>, <span class="year">2018</span>) pp. <span class="fpage">48</span>–<span class="lpage">53</span>.<a class='ref-link' target='_blank' aria-label='CrossRef link for Application for Automatic Programming of Palletizing Robots' href=https://dx.doi.org/10.1109/ICARSC.2018.8374159>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Application for Automatic Programming of Palletizing Robots' href=https://scholar.google.com/scholar_lookup?title=Application+for+Automatic+Programming+of+Palletizing+Robots&author=Moura+F.+M.&author=Silva+M.+F.&publication+year=2018>Google Scholar</a></div></div></div><div id="ref132" aria-flowto="reference-132-content reference-132-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 132 in the content" id="reference-132-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [132] </div> <div class="circle-list__item__grouped"><div id="reference-132-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Paduano</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">A.</span></span>, <span
class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">A flexible robotic depalletizing system for supermarket logistics</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">5</span>(<span class="issue">3</span>), <span class="fpage">4471</span>–<span class="lpage">4476</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A flexible robotic depalletizing system for supermarket logistics' href=https://dx.doi.org/10.1109/LRA.2020.3000427>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A flexible robotic depalletizing system for supermarket logistics' href=https://scholar.google.com/scholar_lookup?title=A+flexible+robotic+depalletizing+system+for+supermarket+logistics&author=Caccavale+R.&author=Arpenti+P.&author=Paduano+G.&author=Fontanelli+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robot+Auto+Lett&volume=5&doi=10.1109%2FLRA.2020.3000427&pages=4471-4476>Google Scholar</a></div></div></div><div id="ref133" aria-flowto="reference-133-content reference-133-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 133 in the content" id="reference-133-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [133] </div> <div class="circle-list__item__grouped"><div id="reference-133-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Arpenti</span>, <span
class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Paduano</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets</span>,” <span class="source">IEEE Robotics and Automation Letters</span> <span class="volume">5</span>(<span class="issue">4</span>), <span class="fpage">6233</span>–<span class="lpage">6238</span> (<span class="year">2020</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets' href=https://dx.doi.org/10.1109/LRA.2020.3013936>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets' href=https://scholar.google.com/scholar_lookup?title=Rgb-d+recognition+and+localization+of+cases+for+robotic+depalletizing+in+supermarkets&author=Arpenti+P.&author=Caccavale+R.&author=Paduano+G.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robotics+and+Automation+Letters&volume=5&doi=10.1109%2FLRA.2020.3013936&pages=6233-6238>Google Scholar</a></div></div></div><div id="ref134" aria-flowto="reference-134-content reference-134-button" class="circle-list__item"><!----> <div 
class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 134 in the content" id="reference-134-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [134] </div> <div class="circle-list__item__grouped"><div id="reference-134-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span>, <span class="string-name"><span class="surname">Paduano</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Caccavale</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Arpenti</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Lippiello</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics</span>,” <span class="source">IEEE Robot Autom Lett</span> <span class="volume">5</span>(<span class="issue">3</span>), <span class="fpage">4612</span>–<span class="lpage">4617</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics' href=https://dx.doi.org/10.1109/LRA.2020.3003283>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics' 
href=https://scholar.google.com/scholar_lookup?title=A+reconfigurable+gripper+for+robotic+autonomous+depalletizing+in+supermarket+logistics&author=Fontanelli+G.+A.&author=Paduano+G.&author=Caccavale+R.&author=Arpenti+P.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robot+Autom+Lett&volume=5&doi=10.1109%2FLRA.2020.3003283&pages=4612-4617>Google Scholar</a></div></div></div><div id="ref135" aria-flowto="reference-135-content reference-135-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 135 in the content" id="reference-135-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [135] </div> <div class="circle-list__item__grouped"><div id="reference-135-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Bowyer</span>, <span class="given-names">S. A.</span></span>, <span class="string-name"><span class="surname">Davies</span>, <span class="given-names">B. L.</span></span> and <span class="string-name"><span class="surname">Baena</span>, <span class="given-names">F. 
R.</span></span>, “<span class="article-title">Active constraints/Virtual fixtures: A survey</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">30</span>(<span class="issue">1</span>), <span class="fpage">138</span>–<span class="lpage">157</span> (<span class="year">2014</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Active constraints/Virtual fixtures: A survey' href=https://dx.doi.org/10.1109/TRO.2013.2283410>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Active constraints/Virtual fixtures: A survey' href=https://scholar.google.com/scholar_lookup?title=Active+constraints%2FVirtual+fixtures%3A+A+survey&author=Bowyer+S.+A.&author=Davies+B.+L.&author=Baena+F.+R.&publication+year=2014&journal=IEEE+Trans+Robot&volume=30&doi=10.1109%2FTRO.2013.2283410&pages=138-157>Google Scholar</a></div></div></div><div id="ref136" aria-flowto="reference-136-content reference-136-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 136 in the content" id="reference-136-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [136] </div> <div class="circle-list__item__grouped"><div id="reference-136-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Li</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Ishii</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Taylor</span>, <span class="given-names">R.
H.</span></span>, “<span class="article-title">Spatial motion constraints using virtual fixtures generated by anatomy</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">23</span>(<span class="issue">1</span>), <span class="fpage">4</span>–<span class="lpage">19</span> (<span class="year">2007</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Spatial motion constraints using virtual fixtures generated by anatomy' href=https://dx.doi.org/10.1109/TRO.2006.886838>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Spatial motion constraints using virtual fixtures generated by anatomy' href=https://scholar.google.com/scholar_lookup?title=Spatial+motion+constraints+using+virtual+fixtures+generated+by+anatomy&author=Li+M.&author=Ishii+M.&author=Taylor+R.+H.&publication+year=2007&journal=IEEE+Trans+Robot&volume=23&doi=10.1109%2FTRO.2006.886838&pages=4-19>Google Scholar</a></div></div></div><div id="ref137" aria-flowto="reference-137-content reference-137-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 137 in the content" id="reference-137-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [137] </div> <div class="circle-list__item__grouped"><div id="reference-137-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Marinho</span>, <span class="given-names">M. M.</span></span>, <span class="string-name"><span class="surname">Adorno</span>, <span class="given-names">B. 
V.</span></span>, <span class="string-name"><span class="surname">Harada</span>, <span class="given-names">K.</span></span> and <span class="string-name"><span class="surname">Mitsuishi</span>, <span class="given-names">M.</span></span>, “<span class="article-title">Dynamic active constraints for surgical robots using vector field inequalities</span>,” <span class="source">IEEE Trans Robot</span> <span class="volume">35</span>(<span class="issue">5</span>), <span class="fpage">1166</span>–<span class="lpage">1185</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Dynamic active constraints for surgical robots using vector field inequalities' href=https://dx.doi.org/10.1109/TRO.2019.2920078>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Dynamic active constraints for surgical robots using vector field inequalities' href=https://scholar.google.com/scholar_lookup?title=Dynamic+active+constraints+for+surgical+robots+using+vector+field+inequalities&author=Marinho+M.+M.&author=Adorno+B.+V.&author=Harada+K.&author=Mitsuishi+M.&publication+year=2019&journal=IEEE+Trans+Robot&volume=35&doi=10.1109%2FTRO.2019.2920078&pages=1166-1185>Google Scholar</a></div></div></div><div id="ref138" aria-flowto="reference-138-content reference-138-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 138 in the content" id="reference-138-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [138] </div> <div class="circle-list__item__grouped"><div id="reference-138-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ames</span>, <span class="given-names">A.
D.</span></span>, <span class="string-name"><span class="surname">Coogan</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Egerstedt</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Notomista</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Sreenath</span>, <span class="given-names">K.</span></span> and <span class="string-name"><span class="surname">Tabuada</span>, <span class="given-names">P.</span></span>, “<span class="article-title">Control Barrier Functions: Theory and Applications</span>,” In: <em class="italic">Proc. 18th European Control Conference (ECC)</em>, (<span class="year">2019</span>) pp. <span class="fpage">3420</span>–<span class="lpage">3431</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Control Barrier Functions: Theory and Applications' href=https://scholar.google.com/scholar_lookup?title=Control+Barrier+Functions%3A+Theory+and+Applications&author=Ames+A.+D.&author=Coogan+S.&author=Egerstedt+M.&author=Notomista+G.&author=Sreenath+K.&author=Tabuada+P.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref139" aria-flowto="reference-139-content reference-139-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 139 in the content" id="reference-139-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [139] </div> <div class="circle-list__item__grouped"><div id="reference-139-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Kim</span>, <span class="given-names">U.</span></span>, <span class="string-name"><span class="surname">Kim</span>, <span class="given-names">Y. 
B.</span></span>, <span class="string-name"><span class="surname">Seok</span>, <span class="given-names">D.-Y.</span></span>, <span class="string-name"><span class="surname">So</span>, <span class="given-names">J.</span></span> and <span class="string-name"><span class="surname">Choi</span>, <span class="given-names">H. R.</span></span>, “<span class="article-title">A surgical palpation probe with 6-axis force/torque sensing capability for minimally invasive surgery</span>,” <span class="source">IEEE Trans Ind Electron</span> <span class="volume">65</span>(<span class="issue">3</span>), <span class="fpage">2755</span>–<span class="lpage">2765</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for A surgical palpation probe with 6-axis force/torque sensing capability for minimally invasive surgery' href=https://dx.doi.org/10.1109/TIE.2017.2739681>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for A surgical palpation probe with 6-axis force/torque sensing capability for minimally invasive surgery' href=https://scholar.google.com/scholar_lookup?title=A+surgical+palpation+probe+with+6-axis+force%2Ftorque+sensing+capability+for+minimally+invasive+surgery&author=Kim+U.&author=Kim+Y.+B.&author=Seok+D.-Y.&author=So+J.&author=Choi+H.+R.&publication+year=2018&journal=IEEE+Trans+Ind+Electron&volume=65&doi=10.1109%2FTIE.2017.2739681&pages=2755-2765>Google Scholar</a></div></div></div><div id="ref140" aria-flowto="reference-140-content reference-140-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 140 in the content" id="reference-140-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [140] </div> <div class="circle-list__item__grouped"><div id="reference-140-content" 
class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Lee</span>, <span class="given-names">D.-H.</span></span>, <span class="string-name"><span class="surname">Kim</span>, <span class="given-names">U.</span></span>, <span class="string-name"><span class="surname">Gulrez</span>, <span class="given-names">T.</span></span>, <span class="string-name"><span class="surname">Yoon</span>, <span class="given-names">W. J.</span></span>, <span class="string-name"><span class="surname">Hannaford</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Choi</span>, <span class="given-names">H. R.</span></span>, “<span class="article-title">A laparoscopic grasping tool with force sensing capability</span>,” <span class="source">IEEE/ASME Trans Mech</span> <span class="volume">21</span>(<span class="issue">1</span>), <span class="fpage">130</span>–<span class="lpage">141</span> (<span class="year">2016</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for A laparoscopic grasping tool with force sensing capability' href=https://scholar.google.com/scholar_lookup?title=A+laparoscopic+grasping+tool+with+force+sensing+capability&author=Lee+D.-H.&author=Kim+U.&author=Gulrez+T.&author=Yoon+W.+J.&author=Hannaford+B.&author=Choi+H.+R.&publication+year=2016&journal=IEEE%2FASME+Trans+Mech&volume=21&pages=130-141>Google Scholar</a></div></div></div><div id="ref141" aria-flowto="reference-141-content reference-141-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 141 in the content" id="reference-141-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [141] </div> <div class="circle-list__item__grouped"><div id="reference-141-content" class="circle-list__item__grouped__content"><span 
class="string-name"><span class="surname">Catalano</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Grioli</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Farnioli</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Serio</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Piazza</span>, <span class="given-names">C.</span></span> and <span class="string-name"><span class="surname">Bicchi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">Adaptive synergies for the design and control of the pisa/iit softhand</span>,” <span class="source">Int J Robot Res</span> <span class="volume">33</span>(<span class="issue">5</span>), <span class="fpage">768</span>–<span class="lpage">782</span> (<span class="year">2014</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Adaptive synergies for the design and control of the pisa/iit softhand' href=https://dx.doi.org/10.1177/0278364913518998>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive synergies for the design and control of the pisa/iit softhand' href=https://scholar.google.com/scholar_lookup?title=Adaptive+synergies+for+the+design+and+control+of+the+pisa%2Fiit+softhand&author=Catalano+M.&author=Grioli+G.&author=Farnioli+E.&author=Serio+A.&author=Piazza+C.&author=Bicchi+A.&publication+year=2014&journal=Int+J+Robot+Res&volume=33&doi=10.1177%2F0278364913518998&pages=768-782>Google Scholar</a></div></div></div><div id="ref142" aria-flowto="reference-142-content reference-142-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 142 in the content" id="reference-142-button" class="circle-list__item__indicator__up"></AppButton></div> <div 
aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [142] </div> <div class="circle-list__item__grouped"><div id="reference-142-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Piazza</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Catalano</span>, <span class="given-names">M. G.</span></span>, <span class="string-name"><span class="surname">Godfrey</span>, <span class="given-names">S. B.</span></span>, <span class="string-name"><span class="surname">Rossi</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Grioli</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Bianchi</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Zhao</span>, <span class="given-names">K.</span></span> and <span class="string-name"><span class="surname">Bicchi</span>, <span class="given-names">A.</span></span>, “<span class="article-title">The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working</span>,” <span class="source">IEEE Robot Autom Mag</span> <span class="volume">24</span>(<span class="issue">4</span>), <span class="fpage">87</span>–<span class="lpage">101</span> (<span class="year">2017</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working' href=https://dx.doi.org/10.1109/MRA.2017.2751662>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working' 
href=https://scholar.google.com/scholar_lookup?title=The+softhand+pro-h%3A+A+hybrid+body-controlled%2C+electrically+powered+hand+prosthesis+for+daily+living+and+working&author=Piazza+C.&author=Catalano+M.+G.&author=Godfrey+S.+B.&author=Rossi+M.&author=Grioli+G.&author=Bianchi+M.&author=Zhao+K.&author=Bicchi+A.&publication+year=2017&journal=IEEE+Robot+Autom+Mag&volume=24&doi=10.1109%2FMRA.2017.2751662&pages=87-101>Google Scholar</a></div></div></div><div id="ref143" aria-flowto="reference-143-content reference-143-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 143 in the content" id="reference-143-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [143] </div> <div class="circle-list__item__grouped"><div id="reference-143-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Moccia</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span>, <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>. <span class="article-title">Vision-Based Virtual Fixtures Generation for Robotic-Assisted Polyp Dissection Procedures</span>. In: <em class="italic">IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)</em>, (<span class="year">2019</span>) pp. 
<span class="fpage">7934</span>–<span class="lpage">7939</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Vision-Based Virtual Fixtures Generation for Robotic-Assisted Polyp Dissection Procedures' href=https://scholar.google.com/scholar_lookup?title=Vision-Based+Virtual+Fixtures+Generation+for+Robotic-Assisted+Polyp+Dissection+Procedures&author=Moccia+R.&author=Selvaggio+M.&author=Villani+L.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref144" aria-flowto="reference-144-content reference-144-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 144 in the content" id="reference-144-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [144] </div> <div class="circle-list__item__grouped"><div id="reference-144-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Moccia</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Iacono</span>, <span class="given-names">C.</span></span>, <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">5</span>(<span class="issue">2</span>), <span class="fpage">1650</span>–<span class="lpage">1655</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery' 
href=https://dx.doi.org/10.1109/LRA.2020.2969941>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery' href=https://scholar.google.com/scholar_lookup?title=Vision-based+dynamic+virtual+fixtures+for+tools+collision+avoidance+in+robotic+surgery&author=Moccia+R.&author=Iacono+C.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2020&journal=IEEE+Robot+Auto+Lett&volume=5&doi=10.1109%2FLRA.2020.2969941&pages=1650-1655>Google Scholar</a></div></div></div><div id="ref145" aria-flowto="reference-145-content reference-145-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 145 in the content" id="reference-145-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [145] </div> <div class="circle-list__item__grouped"><div id="reference-145-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Liu</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Ferrentino</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Moccia</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Pirozzi</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Bracale</span>, <span class="given-names">U.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">The MUSHA hand II: A multi-functional hand for robot-assisted laparoscopic surgery</span>,” 
<span class="source">IEEE/ASME Trans Mech</span> <span class="volume">26</span>(<span class="issue">1</span>), <span class="fpage">393</span>–<span class="lpage">404</span> (<span class="year">2020</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for The MUSHA hand II: A multi-functional hand for robot-assisted laparoscopic surgery' href=https://scholar.google.com/scholar_lookup?title=The+MUSHA+hand+II%3A+A+multi-functional+hand+for+robot-assisted+laparoscopic+surgery&author=Liu+H.&author=Selvaggio+M.&author=Ferrentino+P.&author=Moccia+R.&author=Pirozzi+S.&author=Bracale+U.&author=Ficuciello+F.&publication+year=2020&journal=IEEE%2FASME+Trans+Mech&volume=26&pages=393-404>Google Scholar</a></div></div></div><div id="ref146" aria-flowto="reference-146-content reference-146-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><!----></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [146] </div> <div class="circle-list__item__grouped"><div id="reference-146-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Saini</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Liu</span>, <span class="given-names">H.</span></span> and <span class="string-name"><span class="surname">Bracale</span>, <span class="given-names">U.</span></span>, “<span class="chapter-title">Patent Granted n.
102019000001187, Application Submission Date Jan 2019</span>,” In: <span class="source">Elemento Terminale Per Dispositivi Di Presa Per Interventi Chirurgici, in Particolare Interventi a Minima Invasività</span>, (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Elemento Terminale Per Dispositivi Di Presa Per Interventi Chirurgici, in Particolare Interventi a Minima Invasività' href=https://scholar.google.com/scholar_lookup?title=Elemento+Terminale+Per+Dispositivi+Di+Presa+Per+Interventi+Chirurgici%2C+in+Particolare+Interventi+a+Minima+Invasivit%C3%A0&author=Saini+S.&author=Ficuciello+F.&author=Liu+H.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref147" aria-flowto="reference-147-content reference-147-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 147 in the content" id="reference-147-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [147] </div> <div class="circle-list__item__grouped"><div id="reference-147-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span>, <span class="string-name"><span class="surname">Marrazzo</span>, <span class="given-names">V. 
R.</span></span>, <span class="string-name"><span class="surname">Bracale</span>, <span class="given-names">U.</span></span>, <span class="string-name"><span class="surname">Irace</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Breglio</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Villani</span>, <span class="given-names">L.</span></span>, <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">The musha underactuated hand for robot-aided minimally invasive surgery</span>,” <span class="source">Int J Med Robot Comp Assis Surg</span> <span class="volume">15</span>(<span class="issue">3</span>), <span class="fpage">e1981</span> (<span class="year">2019</span>a).<a class='ref-link' target='_blank' aria-label='CrossRef link for The musha underactuated hand for robot-aided minimally invasive surgery' href=https://dx.doi.org/10.1002/rcs.1981>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for The musha underactuated hand for robot-aided minimally invasive surgery' href=https://scholar.google.com/scholar_lookup?title=The+musha+underactuated+hand+for+robot-aided+minimally+invasive+surgery&author=Selvaggio+M.&author=Fontanelli+G.+A.&author=Marrazzo+V.+R.&author=Bracale+U.&author=Irace+A.&author=Breglio+G.&author=Villani+L.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2019&journal=Int+J+Med+Robot+Comp+Assis+Surg&volume=15&doi=10.1002%2Frcs.1981>Google Scholar</a><a class='ref-link' target='_blank' aria-label='PubMed link for The musha underactuated hand for robot-aided minimally invasive surgery' href=https://www.ncbi.nlm.nih.gov/pubmed/30588772>PubMed</a></div></div></div><div id="ref148" aria-flowto="reference-148-content 
reference-148-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 148 in the content" id="reference-148-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [148] </div> <div class="circle-list__item__grouped"><div id="reference-148-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Moccia</span>, <span class="given-names">R.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Autonomous Endoscope Control Algorithm with Visibility and Joint Limits Avoidance Constraints for Da Vinci Research kit robot</span>,” In: <em class="italic">IEEE International Conference on Robotics and Automation (ICRA)</em>, (<span class="year">2023</span>) pp. <span class="fpage">776</span>–<span class="lpage">781</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Autonomous Endoscope Control Algorithm with Visibility and Joint Limits Avoidance Constraints for Da Vinci Research kit robot' href=https://scholar.google.com/scholar_lookup?title=Autonomous+Endoscope+Control+Algorithm+with+Visibility+and+Joint+Limits+Avoidance+Constraints+for+Da+Vinci+Research+kit+robot&author=Moccia+R.&author=Ficuciello+F.&publication+year=2023>Google Scholar</a></div></div></div><div id="ref149" aria-flowto="reference-149-content reference-149-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 149 in the content" id="reference-149-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [149] </div> <div class="circle-list__item__grouped"><div 
id="reference-149-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ferro</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Brunori</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Magistri</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Saiella</span>, <span class="given-names">L.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span>, “<span class="article-title">A Portable Da Vinci Simulator in Virtual Reality</span>,” In: <em class="italic">Third IEEE International Conference on Robotic Computing (IRC)</em>, (<span class="year">2019</span>) pp. <span class="fpage">447</span>–<span class="lpage">448</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for A Portable Da Vinci Simulator in Virtual Reality' href=https://scholar.google.com/scholar_lookup?title=A+Portable+Da+Vinci+Simulator+in+Virtual+Reality&author=Ferro+M.&author=Brunori+D.&author=Magistri+F.&author=Saiella+L.&author=Selvaggio+M.&author=Fontanelli+G.+A.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref150" aria-flowto="reference-150-content reference-150-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 150 in the content" id="reference-150-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [150] </div> <div class="circle-list__item__grouped"><div id="reference-150-content" class="circle-list__item__grouped__content"><span class="string-name"><span 
class="surname">Fontanelli</span>, <span class="given-names">G. A.</span></span>, <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Ferro</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Vendittelli</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Portable dVRK: An augmented V-REP simulator of da vinci research kit</span>,” <span class="source">Acta Polytech Hung</span> <span class="volume">16</span>(<span class="issue">8</span>), <span class="fpage">79</span>–<span class="lpage">98</span> (<span class="year">2019</span>).<a class='ref-link' target='_blank' aria-label='Google Scholar link for Portable dVRK: An augmented V-REP simulator of da vinci research kit' href=https://scholar.google.com/scholar_lookup?title=Portable+dVRK%3A+An+augmented+V-REP+simulator+of+da+vinci+research+kit&author=Fontanelli+G.+A.&author=Selvaggio+M.&author=Ferro+M.&author=Ficuciello+F.&author=Vendittelli+M.&author=Siciliano+B.&publication+year=2019&journal=Acta+Polytech+Hung&volume=16&pages=79-98>Google Scholar</a></div></div></div><div id="ref151" aria-flowto="reference-151-content reference-151-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 151 in the content" id="reference-151-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [151] </div> <div class="circle-list__item__grouped"><div id="reference-151-content" class="circle-list__item__grouped__content"><span 
class="string-name"><span class="surname">Ghafoor</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Dai</span>, <span class="given-names">J. S.</span></span> and <span class="string-name"><span class="surname">Duffy</span>, <span class="given-names">J.</span></span>, “<span class="article-title">Stiffness modeling of the soft-finger contact in robotic grasping</span>,” <span class="source">J Mech Design</span> <span class="volume">126</span>(<span class="issue">4</span>), <span class="fpage">646</span>–<span class="lpage">656</span> (<span class="year">2004</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Stiffness modeling of the soft-finger contact in robotic grasping' href=https://dx.doi.org/10.1115/1.1758255>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Stiffness modeling of the soft-finger contact in robotic grasping' href=https://scholar.google.com/scholar_lookup?title=Stiffness+modeling+of+the+soft-finger+contact+in+robotic+grasping&author=Ghafoor+A.&author=Dai+J.+S.&author=Duffy+J.&publication+year=2004&journal=J+Mech+Design&volume=126&doi=10.1115%2F1.1758255&pages=646-656>Google Scholar</a></div></div></div><div id="ref152" aria-flowto="reference-152-content reference-152-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 152 in the content" id="reference-152-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [152] </div> <div class="circle-list__item__grouped"><div id="reference-152-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Sallam</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Fontanelli</span>, <span class="given-names">G. 
A.</span></span>, <span class="string-name"><span class="surname">Gallo</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">La Rocca</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Di Spiezio Sardo</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Longo</span>, <span class="given-names">N.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery</span>,” <span class="source">IEEE Trans Med Robot Bio</span> <span class="volume">5</span>(<span class="issue">4</span>), <span class="fpage">843</span>–<span class="lpage">856</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery' href=https://dx.doi.org/10.1109/TMRB.2023.3309942>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery' href=https://scholar.google.com/scholar_lookup?title=Prototype+realization+of+a+human+hand-inspired+needle+driver+for+robotic-assisted+surgery&author=Sallam+M.&author=Fontanelli+G.+A.&author=Gallo+A.&author=La+Rocca+R.&author=Di+Spiezio+Sardo+A.&author=Longo+N.&author=Ficuciello+F.&publication+year=2023&journal=IEEE+Trans+Med+Robot+Bio&volume=5&doi=10.1109%2FTMRB.2023.3309942&pages=843-856>Google Scholar</a></div></div></div><div id="ref153" aria-flowto="reference-153-content reference-153-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 153 in the content" id="reference-153-button" 
class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [153] </div> <div class="circle-list__item__grouped"><div id="reference-153-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Coevoet</span>, <span class="given-names">E.</span></span>, <span class="string-name"><span class="surname">Adagolodjo</span>, <span class="given-names">Y.</span></span>, <span class="string-name"><span class="surname">Lin</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Duriez</span>, <span class="given-names">C.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate</span>,” <span class="source">IEEE Robot Auto Lett</span> <span class="volume">7</span>(<span class="issue">2</span>), <span class="fpage">4853</span>–<span class="lpage">4860</span> (<span class="year">2022</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate' href=https://dx.doi.org/10.1109/LRA.2022.3152322>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate' 
href=https://scholar.google.com/scholar_lookup?title=Planning+of+soft-rigid+hybrid+arms+in+contact+with+compliant+environment%3A+Application+to+the+transrectal+biopsy+of+the+prostate&author=Coevoet+E.&author=Adagolodjo+Y.&author=Lin+M.&author=Duriez+C.&author=Ficuciello+F.&publication+year=2022&journal=IEEE+Robot+Auto+Lett&volume=7&doi=10.1109%2FLRA.2022.3152322&pages=4853-4860>Google Scholar</a></div></div></div><div id="ref154" aria-flowto="reference-154-content reference-154-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 154 in the content" id="reference-154-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [154] </div> <div class="circle-list__item__grouped"><div id="reference-154-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Canbay</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Ferrentino</span>, <span class="given-names">P.</span></span>, <span class="string-name"><span class="surname">Liu</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Moccia</span>, <span class="given-names">R.</span></span>, <span class="string-name"><span class="surname">Pirozzi</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Siciliano</span>, <span class="given-names">B.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Calibration of Tactile/Force Sensors for Grasping with the PRISMA Hand II</span>,” In: <em class="italic">IEEE/ASME International Conference on Advanced Intelligent Mechatronics (AIM)</em>, (<span class="year">2021</span>) pp. 
<span class="fpage">442</span>–<span class="lpage">447</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Calibration of Tactile/Force Sensors for Grasping with the PRISMA Hand II' href=https://scholar.google.com/scholar_lookup?title=Calibration+of+Tactile%2FForce+Sensors+for+Grasping+with+the+PRISMA+Hand+II&author=Canbay+D.&author=Ferrentino+P.&author=Liu+H.&author=Moccia+R.&author=Pirozzi+S.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2021>Google Scholar</a></div></div></div><div id="ref155" aria-flowto="reference-155-content reference-155-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 155 in the content" id="reference-155-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [155] </div> <div class="circle-list__item__grouped"><div id="reference-155-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Leccia</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Sallam</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Grazioso</span>, <span class="given-names">S.</span></span>, <span class="string-name"><span class="surname">Caporaso</span>, <span class="given-names">T.</span></span>, <span class="string-name"><span class="surname">Di Gironimo</span>, <span class="given-names">G.</span></span> and <span class="string-name"><span class="surname">Ficuciello</span>, <span class="given-names">F.</span></span>, “<span class="article-title">Development and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability</span>,” <span class="source">Eng Appl Artif Intel</span> <span class="volume">121</span>, <span 
class="fpage">105853</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Development and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability' href=https://dx.doi.org/10.1016/j.engappai.2023.105853>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Development and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability' href=https://scholar.google.com/scholar_lookup?title=Development+and+testing+of+a+virtual+simulator+for+a+myoelectric+prosthesis+prototype+%E2%80%93+the+prisma+hand+ii+%E2%80%93+to+improve+its+usability+and+acceptability&author=Leccia+A.&author=Sallam+M.&author=Grazioso+S.&author=Caporaso+T.&author=Di+Gironimo+G.&author=Ficuciello+F.&publication+year=2023&journal=Eng+Appl+Artif+Intel&volume=121&doi=10.1016%2Fj.engappai.2023.105853>Google Scholar</a></div></div></div><div id="ref156" aria-flowto="reference-156-content reference-156-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 156 in the content" id="reference-156-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [156] </div> <div class="circle-list__item__grouped"><div id="reference-156-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Gong</span>, <span class="given-names">Y.</span></span>, <span class="string-name"><span class="surname">Sun</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Nair</span>, <span class="given-names">A.</span></span>, <span class="string-name"><span class="surname">Bidwai</span>, <span class="given-names">A.</span></span>, 
<span class="string-name"><span class="surname">R.</span>, <span class="given-names">C. S.</span></span>, <span class="string-name"><span class="surname">Grezmak</span>, <span class="given-names">J.</span></span>, <span class="string-name"><span class="surname">Sartoretti</span>, <span class="given-names">G.</span></span> and <span class="string-name"><span class="surname">Daltorio</span>, <span class="given-names">K. A.</span></span>, “<span class="article-title">Legged robots for object manipulation: A review</span>,” <span class="source">Front Mech Eng</span> <span class="volume">9</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Legged robots for object manipulation: A review' href=https://dx.doi.org/10.3389/fmech.2023.1142421>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Legged robots for object manipulation: A review' href=https://scholar.google.com/scholar_lookup?title=Legged+robots+for+object+manipulation%3A+A+review&author=Gong+Y.&author=Sun+G.&author=Nair+A.&author=Bidwai+A.&author=R.+C.+S.&author=Grezmak+J.&author=Sartoretti+G.&author=Daltorio+K.+A.&publication+year=2023&journal=Front+Mech+Eng&volume=9&doi=10.3389%2Ffmech.2023.1142421>Google Scholar</a></div></div></div><div id="ref157" aria-flowto="reference-157-content reference-157-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 157 in the content" id="reference-157-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [157] </div> <div class="circle-list__item__grouped"><div id="reference-157-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Jia</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Huang</span>, <span 
class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Li</span>, <span class="given-names">B.</span></span>, <span class="string-name"><span class="surname">Wu</span>, <span class="given-names">Y.</span></span>, <span class="string-name"><span class="surname">Cao</span>, <span class="given-names">Q.</span></span> and <span class="string-name"><span class="surname">Guo</span>, <span class="given-names">H.</span></span>, “<span class="article-title">Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators</span>,” <span class="source">Mech Mach Theory</span> <span class="volume">128</span>, <span class="fpage">544</span>–<span class="lpage">559</span> (<span class="year">2018</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators' href=https://dx.doi.org/10.1016/j.mechmachtheory.2018.06.017>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators' href=https://scholar.google.com/scholar_lookup?title=Synthesis+of+a+novel+type+of+metamorphic+mechanism+module+for+large+scale+deployable+grasping+manipulators&author=Jia+G.&author=Huang+H.&author=Li+B.&author=Wu+Y.&author=Cao+Q.&author=Guo+H.&publication+year=2018&journal=Mech+Mach+Theory&volume=128&doi=10.1016%2Fj.mechmachtheory.2018.06.017&pages=544-559>Google Scholar</a></div></div></div><div id="ref158" aria-flowto="reference-158-content reference-158-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 158 in the content" id="reference-158-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> 
[158] </div> <div class="circle-list__item__grouped"><div id="reference-158-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Jia</span>, <span class="given-names">G.</span></span>, <span class="string-name"><span class="surname">Huang</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Wang</span>, <span class="given-names">S.</span></span> and <span class="string-name"><span class="surname">Li</span>, <span class="given-names">B.</span></span>, “<span class="article-title">Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law</span>,” <span class="source">Mech Mach Theory</span> <span class="volume">161</span>, <span class="fpage">104330</span> (<span class="year">2021</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law' href=https://dx.doi.org/10.1016/j.mechmachtheory.2021.104330>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law' href=https://scholar.google.com/scholar_lookup?title=Type+synthesis+of+plane-symmetric+deployable+grasping+parallel+mechanisms+using+constraint+force+parallelogram+law&author=Jia+G.&author=Huang+H.&author=Wang+S.&author=Li+B.&publication+year=2021&journal=Mech+Mach+Theory&volume=161&doi=10.1016%2Fj.mechmachtheory.2021.104330>Google Scholar</a></div></div></div><div id="ref159" aria-flowto="reference-159-content reference-159-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 159 in the content" id="reference-159-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" 
data-test-hidden="false" class="circle-list__item__number"> [159] </div> <div class="circle-list__item__grouped"><div id="reference-159-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Bellicoso</span>, <span class="given-names">C. D.</span></span>, <span class="string-name"><span class="surname">Krämer</span>, <span class="given-names">K.</span></span>, <span class="string-name"><span class="surname">Stäuble</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Sako</span>, <span class="given-names">D.</span></span>, <span class="string-name"><span class="surname">Jenelten</span>, <span class="given-names">F.</span></span>, <span class="string-name"><span class="surname">Bjelonic</span>, <span class="given-names">M.</span></span> and <span class="string-name"><span class="surname">Hutter</span>, <span class="given-names">M.</span></span>, “<span class="article-title">Alma - Articulated Locomotion and Manipulation for a Torque-Controllable Robot</span>,” In: <em class="italic">International Conference on Robotics and Automation (ICRA)</em>, (<span class="year">2019</span>) pp. 
<span class="fpage">8477</span>–<span class="lpage">8483</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Alma - Articulated Locomotion and Manipulation for a Torque-Controllable Robot' href=https://scholar.google.com/scholar_lookup?title=Alma+-+Articulated+Locomotion+and+Manipulation+for+a+Torque-Controllable+Robot&author=Bellicoso+C.+D.&author=Kr%C3%A4mer+K.&author=St%C3%A4uble+M.&author=Sako+D.&author=Jenelten+F.&author=Bjelonic+M.&author=Hutter+M.&publication+year=2019>Google Scholar</a></div></div></div><div id="ref160" aria-flowto="reference-160-content reference-160-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 160 in the content" id="reference-160-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [160] </div> <div class="circle-list__item__grouped"><div id="reference-160-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Ferrolho</span>, <span class="given-names">H.</span></span>, <span class="string-name"><span class="surname">Ivan</span>, <span class="given-names">V.</span></span>, <span class="string-name"><span class="surname">Merkt</span>, <span class="given-names">W.</span></span>, <span class="string-name"><span class="surname">Havoutis</span>, <span class="given-names">I.</span></span> and <span class="string-name"><span class="surname">Vijayakumar</span>, <span class="given-names">S.</span></span>, “<span class="article-title">Roloma: Robust loco-manipulation for quadruped robots with arms</span>,” <span class="source">Auton Robot</span> <span class="volume">47</span>(<span class="issue">8</span>), <span class="fpage">1463</span>–<span class="lpage">1481</span> (<span class="year">2023</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Roloma: 
Robust loco-manipulation for quadruped robots with arms' href=https://dx.doi.org/10.1007/s10514-023-10146-0>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Roloma: Robust loco-manipulation for quadruped robots with arms' href=https://scholar.google.com/scholar_lookup?title=Roloma%3A+Robust+loco-manipulation+for+quadruped+robots+with+arms&author=Ferrolho+H.&author=Ivan+V.&author=Merkt+W.&author=Havoutis+I.&author=Vijayakumar+S.&publication+year=2023&journal=Auton+Robot&volume=47&doi=10.1007%2Fs10514-023-10146-0&pages=1463-1481>Google Scholar</a></div></div></div><div id="ref161" aria-flowto="reference-161-content reference-161-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 161 in the content" id="reference-161-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [161] </div> <div class="circle-list__item__grouped"><div id="reference-161-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Costanzo</span>, <span class="given-names">M.</span></span>, <span class="string-name"><span class="surname">Natale</span>, <span class="given-names">C.</span></span> and <span class="string-name"><span class="surname">Selvaggio</span>, <span class="given-names">M.</span></span>, “<span class="article-title">Visual and Haptic Cues for Human-Robot Handover*</span>,” In: <em class="italic">32nd IEEE International Conference on Robot and Human Interactive Communication (RO–MAN)</em>, (<span class="year">2023</span>) pp. 
<span class="fpage">2677</span>–<span class="lpage">2682</span>.<a class='ref-link' target='_blank' aria-label='Google Scholar link for Visual and Haptic Cues for Human-Robot Handover*' href=https://scholar.google.com/scholar_lookup?title=Visual+and+Haptic+Cues+for+Human-Robot+Handover*&author=Costanzo+M.&author=Natale+C.&author=Selvaggio+M.&publication+year=2023>Google Scholar</a></div></div></div><div id="ref162" aria-flowto="reference-162-content reference-162-button" class="circle-list__item"><!----> <div class="circle-list__item__indicator"><AppButton icon="up-circle" aria-label="Return to the reference 162 in the content" id="reference-162-button" class="circle-list__item__indicator__up"></AppButton></div> <div aria-hidden="true" data-test-hidden="false" class="circle-list__item__number"> [162] </div> <div class="circle-list__item__grouped"><div id="reference-162-content" class="circle-list__item__grouped__content"><span class="string-name"><span class="surname">Dai</span>, <span class="given-names">J. S.</span></span> and <span class="string-name"><span class="surname">Caldwell</span>, <span class="given-names">D. 
G.</span></span>, “<span class="article-title">Origami-based robotic paper-and-board packaging for food industry</span>,” <span class="source">Trend Food Sci Tech</span> <span class="volume">21</span>(<span class="issue">3</span>), <span class="fpage">153</span>–<span class="lpage">157</span> (<span class="year">2010</span>).<a class='ref-link' target='_blank' aria-label='CrossRef link for Origami-based robotic paper-and-board packaging for food industry' href=https://dx.doi.org/10.1016/j.tifs.2009.10.007>CrossRef</a><a class='ref-link' target='_blank' aria-label='Google Scholar link for Origami-based robotic paper-and-board packaging for food industry' href=https://scholar.google.com/scholar_lookup?title=Origami-based+robotic+paper-and-board+packaging+for+food+industry&author=Dai+J.+S.&author=Caldwell+D.+G.&publication+year=2010&journal=Trend+Food+Sci+Tech&volume=21&doi=10.1016%2Fj.tifs.2009.10.007&pages=153-157>Google Scholar</a></div></div></div></div></div> <!----></div></div> <!----> <div id="figures-tab" class="figures tab-pane" data-v-241a4b23><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 0" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-60176-mediumThumb-png-S026357472400033X_fig1.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-79078-optimisedImage-png-S026357472400033X_fig1.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 1.</span> <span data-v-241a4b23><span class="p">Graphical representation of the six research areas and sub-areas dealt with within 
the PRISMA Lab at the University of Naples Federico II. This article proposes an overview of the main problems addressed in these fields and discusses potential future directions on the topics.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 1" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Table I.</span> <span data-v-241a4b23><span class="p">Summary of PRISMA Lab contributions in the field of dynamic manipulation and locomotion.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 2" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-36577-mediumThumb-png-S026357472400033X_fig2.jpg" 
data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-68785-optimisedImage-png-S026357472400033X_fig2.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 2.</span> <span data-v-241a4b23><span class="p">Tray-based and pushing non-prehensile object manipulation scenarios. Upper row: a robot is tasked with transporting an object placed on a tray-like end-effector along a predefined, fast trajectory while avoiding the relative sliding (a) [20]. The robot performs a linear transporting trajectory while opportunely inclining the tray to improve the robustness of the task performance (b) [19]. Bottom row: an object is pushed by a mobile robot along a trajectory (c) [16]. Multiple robots can push an object with minimal effort by optimally placing themselves around it (d) [17].</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 3" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-77421-mediumThumb-png-S026357472400033X_fig3.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-91771-optimisedImage-png-S026357472400033X_fig3.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 3.</span> <span data-v-241a4b23><span class="p">On the left, a quadruped robot is connected to a human through a leash. 
This scenario was tested in the Gazebo simulation environment emulating a guide dog helping a visually impaired person. In the middle, a legged manipulator transports an object placed on a tray-like end-effector while simultaneously preventing it from sliding. On the right, the model behind this task, where the object (red cube) is prevented from sliding by keeping contact forces (blue) inside the friction cones (green).</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 4" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Table II.</span> <span data-v-241a4b23><span class="p">Summary of PRISMA Lab contributions in the field of aerial robotics.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 5" data-zoomable="true" 
data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-16354-mediumThumb-png-S026357472400033X_fig4.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-93246-optimisedImage-png-S026357472400033X_fig4.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 4.</span> <span data-v-241a4b23><span class="p">Two unmanned aerial manipulators during non-destructive test measurements. On the left, an aerial vehicle equipped with one arm is measuring the thickness of a wall with an ultrasonic probe. On the right, a hybrid drone equipped with a specially developed omnidirectional mobile base that can land on pipelines and then move to position ad-hoc measurement systems for non-destructive test measures.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 6" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Table III.</span> <span data-v-241a4b23><span class="p">Summary of PRISMA Lab contributions in the field of physical human-robot 
interaction.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 7" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-43987-mediumThumb-png-S026357472400033X_fig5.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-02137-optimisedImage-png-S026357472400033X_fig5.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 5.</span> <span data-v-241a4b23><span class="p">(a) cognitive control framework compatible with AI methods for planning, reasoning, and learning; (b) task orchestration and situated interpretation of ambiguous human gestures; (c) kinesthetic teaching of structured tasks; combined task and motion plans (d); human-robot collaboration during the execution of a shared task (e).</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 8" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-82955-mediumThumb-png-S026357472400033X_fig6.jpg" 
data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-77865-optimisedImage-png-S026357472400033X_fig6.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 6.</span> <span data-v-241a4b23><span class="p">A shared control telerobotic system consists of a local device used to jointly send partial commands and receive computed haptic information as feedback from the remote side. The user usually observes the remote environment by means of a camera that provides a limited awareness of the scene. In (a), the robot must execute a remote object grasping task [91]. In this case, provided haptic information aims to increase the situational awareness of the operator informing about the proximity to the robot’s joint limits and singularities. In (b) and (c), vision-based or programmed virtual fixtures aid the execution of the task in industrial and surgical robotic settings, respectively [92, 93]. 
In (d), a non-prehensile object transportation scenario is considered and haptic feedback is provided about the proximity to the sliding conditions of the object placed on the tray [19].</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 9" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Table IV.</span> <span data-v-241a4b23><span class="p">Summary of PRISMA Lab contributions in the field of AI and cognitive robotics.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 10" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png" 
class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Table V.</span> <span data-v-241a4b23><span class="p">Summary of PRISMA Lab contributions in the field of industrial robotics.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 11" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-29125-mediumThumb-png-S026357472400033X_fig7.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-72110-optimisedImage-png-S026357472400033X_fig7.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 7.</span> <span data-v-241a4b23><span class="p">Overall picture of the logistic scenario including an abstract representation of vision-based recognition and localization algorithm (left), snapshot of the robotic depalletizing cell (right) with highlighted detail of the gripping tool (red window).</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 12" data-zoomable="true" 
data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Table VI.</span> <span data-v-241a4b23><span class="p">Summary of PRISMA Lab contributions in the field of medical robotics.</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 13" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-90309-mediumThumb-png-S026357472400033X_fig8.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-63707-optimisedImage-png-S026357472400033X_fig8.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 8.</span> <span data-v-241a4b23><span class="p">Left: a marker-less method tracks surgical tools, establishing VF geometry resembling a cylinder with its central axis aligned with the instrument’s axis [144]; right: the MUSHA Hand II surgical tool, integrated on the dVRK robot [145–147].</span></span></p></div> </div></div> <hr aria-hidden="true" class="separator dashed" data-v-7036083a data-v-241a4b23></div><div data-v-241a4b23><div class="figures__item" 
data-v-241a4b23><div class="figures__item__image-box" data-v-241a4b23><button class="figures__ref" data-v-241a4b23> View in content </button> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==" alt="Figure 14" data-zoomable="true" data-src="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-38486-mediumThumb-png-S026357472400033X_fig9.jpg" data-enlarged-image="https://static.cambridge.org/binary/version/id/urn:cambridge.org:id:binary-alt:20240319140930-72669-optimisedImage-png-S026357472400033X_fig9.jpg" class="graphic" data-v-241a4b23></div> <div data-v-241a4b23><div class="caption" data-v-241a4b23><p data-v-241a4b23><span class="label" data-v-241a4b23>Figure 9.</span> <span data-v-241a4b23><span class="p">The PRISMA Hand II and its capabilities. The grasping options are categorized into three sets: (a) lateral grasps, (b) pinches, and (c) power grasps [154, 155].</span></span></p></div> </div></div> <!----></div></div> <!----> <!----> <!----> <!----> <!----> <!----> <!----> <!----> <div id="metrics-tab" publication-date="20 March 2024" class="metrics tab-pane" data-v-c41a0c86><div class="app-loader" data-v-c41a0c86></div></div></div></div> <!----></div> <div role="complementary" aria-label="related contents" class="column__main__right" data-v-01274b1d><div class="access-block row access-status" data-v-5fad35b8 data-v-01274b1d><span class="has-access" data-v-5fad35b8><img 
src="data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTQiIGhlaWdodD0iMTQiIHZpZXdCb3g9IjAgMCAxNCAxNCIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZmlsbC1ydWxlPSJldmVub2RkIiBjbGlwLXJ1bGU9ImV2ZW5vZGQiIGQ9Ik0xNCA3QzE0IDEwLjg2NjUgMTAuODY2NSAxNCA3IDE0QzMuMTMzNDUgMTQgMCAxMC44NjY1IDAgN0MwIDMuMTMzNDUgMy4xMzM0NSAwIDcgMEMxMC44NjY1IDAgMTQgMy4xMzM0NSAxNCA3WiIgZmlsbD0iIzFGOTYzOCIvPgo8cGF0aCBmaWxsLXJ1bGU9ImV2ZW5vZGQiIGNsaXAtcnVsZT0iZXZlbm9kZCIgZD0iTTUuOTA4MTUgMTAuNjczTDIuNTQ2ODggNy4zMTE3Nkw0LjM0NjUxIDUuNTEyMTNMNS45MDgxNSA3LjA3Mzc2TDkuNjUyNTEgMy4zMjgxMkwxMS40NTIxIDUuMTI5MDNMNS45MDgxNSAxMC42NzNaIiBmaWxsPSIjRkVGRUZFIi8+Cjwvc3ZnPgo=" alt="" class="app-icon access" data-v-d2c09870 data-v-5fad35b8> <span class="sr-only" data-v-5fad35b8>You have </span> Access </span> <!----></div> <!----> <!----> <!----> <!----></div></div></div></div></div> <div id="cited-by-modal" role="dialog" aria-labelledby="Cited by modal" aria-hidden="true" class="modal fade" data-v-014d05be data-v-01274b1d><div class="modal-dialog modal-xl cited-by-modal" data-v-014d05be><div tabindex="-1" class="modal-content" data-v-014d05be><div class="modal-header" data-v-014d05be><h1 class="modal-header__heading" data-v-014d05be>Cited by</h1></div> <div class="modal-body" data-v-014d05be><div class="modal-body__loader" data-v-014d05be><div class="modal-body__loader__spinner" data-v-014d05be></div> <p class="modal-body__loader__message" data-v-014d05be>Loading...</p></div></div> <button aria-label="Close Cited by" aria-expanded="false" data-dismiss="modal" class="app-button cited-by-modal__button--close app-button__icon app-button--" data-v-2a038744 data-v-014d05be><img 
src="data:image/svg+xml;base64,CjxzdmcgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB3aWR0aD0iMzciIGhlaWdodD0iMzciIHZpZXdCb3g9IjAgMCAzNyAzNyI+PHBhdGggZmlsbD0iI0ZGRiIgZD0iTTIyLjMgMjAuNWwtMi0yLS4xLS4xLjEtLjEgMi0yYy41LS41LjYtMS40LjEtMS44LS41LS41LTEuMy0uNC0xLjguMWwtMiAyLS4xLjEtLjEtLjEtMi0yYy0uNS0uNS0xLjQtLjYtMS44LS4xLS41LjUtLjQgMS4zLjEgMS44bDIgMiAuMS4xLS4xLjEtMiAyYy0uNS41LS42IDEuNC0uMSAxLjguNS41IDEuMy40IDEuOC0uMWwyLTIgLjEtLjEuMS4xIDIgMmMuNS41IDEuNC42IDEuOC4xLjUtLjQuNC0xLjItLjEtMS44eiIvPjxwYXRoIGZpbGw9IiNGRkYiIGQ9Ik0xOC41IDM2QzguOSAzNiAxIDI4LjEgMSAxOC41UzguOSAxIDE4LjUgMSAzNiA4LjkgMzYgMTguNSAyOC4xIDM2IDE4LjUgMzZ6bTAtMzRDOS40IDIgMiA5LjQgMiAxOC41UzkuNCAzNSAxOC41IDM1IDM1IDI3LjYgMzUgMTguNSAyNy42IDIgMTguNSAyeiIvPjwvc3ZnPg==" alt="" class="app-icon icon close-modal" data-v-d2c09870 data-v-2a038744> <!----></button></div></div></div></div></div></div></div><script>window.__NUXT__=(function(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,_,$,aa,ab,ac,ad,ae,af,ag,ah,ai,aj,ak,al,am,an,ao,ap,aq,ar,as,at,au,av,aw,ax,ay,az,aA,aB,aC,aD,aE,aF,aG,aH,aI,aJ,aK,aL,aM,aN,aO,aP,aQ,aR,aS,aT,aU,aV,aW,aX,aY,aZ,a_,a$,ba,bb,bc,bd,be,bf,bg,bh,bi,bj,bk,bl,bm,bn,bo,bp,bq,br,bs,bt,bu,bv,bw,bx,by,bz,bA,bB,bC,bD,bE,bF,bG,bH,bI,bJ,bK,bL,bM,bN,bO,bP,bQ,bR,bS,bT,bU,bV,bW,bX,bY,bZ,b_,b$,ca,cb,cc,cd,ce,cf,cg,ch,ci,cj,ck,cl,cm,cn,co,cp,cq,cr,cs,ct,cu,cv,cw,cx,cy,cz,cA,cB,cC,cD,cE,cF,cG,cH,cI,cJ,cK,cL,cM,cN,cO,cP,cQ,cR,cS,cT,cU,cV,cW,cX,cY,cZ,c_,c$,da,db,dc,dd,de,df,dg,dh,di,dj,dk,dl,dm,dn,do0,dp,dq,dr,ds,dt,du,dv,dw,dx,dy,dz,dA,dB,dC,dD,dE,dF,dG,dH,dI,dJ,dK,dL,dM,dN,dO,dP,dQ,dR,dS,dT,dU,dV,dW,dX,dY,dZ,d_,d$,ea,eb,ec,ed,ee,ef,eg,eh,ei,ej,ek,el,em,en,eo,ep,eq,er,es,et,eu,ev,ew,ex,ey,ez,eA,eB,eC,eD,eE,eF,eG,eH,eI,eJ,eK,eL,eM,eN,eO,eP,eQ,eR,eS,eT,eU,eV,eW,eX,eY,eZ,e_,e$,fa,fb,fc,fd,fe,ff,fg,fh,fi,fj,fk,fl,fm,fn,fo,fp,fq,fr,fs,ft,fu,fv,fw,fx,fy,fz,fA,fB,fC,fD,fE,fF,fG,fH,fI,fJ,fK,fL,fM,fN,fO,fP,fQ,fR,fS,fT,fU,fV){return 
{layout:"default",data:[{article:{id:"5DA1E6B0701411F71E5FFC40F2E53847",metadata:{title:K,htmlTitle:K,subtitle:a,authorsGroup:{authors:{contributors:[{givenNames:"Mario",surname:"Selvaggio",nameStyle:f,affiliations:[{text:u}],isCorresponding:b,notes:"\u003Cdiv class=\"corresp\"\u003E\u003Cspan class=\"bold\"\u003ECorresponding author:\u003C\u002Fspan\u003E Mario Selvaggio; Email: \u003Ca href=\"mailto:mario.selvaggio@unina.it\"\u003Emario.selvaggio@unina.it\u003C\u002Fa\u003E\u003C\u002Fdiv\u003E",isAnonymous:c,fullName:"Mario Selvaggio",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Mario%20Selvaggio&eventCode=SE-AU",orcidUrl:"https:\u002F\u002Forcid.org\u002F0000-0002-2460-1914"},{givenNames:"Rocco",surname:"Moccia",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Rocco Moccia",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Rocco%20Moccia&eventCode=SE-AU",orcidUrl:f},{givenNames:"Pierluigi",surname:"Arpenti",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Pierluigi Arpenti",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Pierluigi%20Arpenti&eventCode=SE-AU",orcidUrl:f},{givenNames:"Riccardo",surname:"Caccavale",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Riccardo Caccavale",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Riccardo%20Caccavale&eventCode=SE-AU",orcidUrl:f},{givenNames:"Fabio",surname:"Ruggiero",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Fabio Ruggiero",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Fabio%20Ruggiero&eventCode=SE-AU",orcidUrl:f},{givenNames:"Jonathan",surname:"Cacace",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Jonathan 
Cacace",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Jonathan%20Cacace&eventCode=SE-AU",orcidUrl:f},{givenNames:"Fanny",surname:"Ficuciello",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Fanny Ficuciello",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Fanny%20Ficuciello&eventCode=SE-AU",orcidUrl:f},{givenNames:"Alberto",surname:"Finzi",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Alberto Finzi",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Alberto%20Finzi&eventCode=SE-AU",orcidUrl:f},{givenNames:"Vincenzo",surname:"Lippiello",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Vincenzo Lippiello",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Vincenzo%20Lippiello&eventCode=SE-AU",orcidUrl:f},{givenNames:"Luigi",surname:"Villani",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Luigi Villani",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Luigi%20Villani&eventCode=SE-AU",orcidUrl:f},{givenNames:"Bruno",surname:"Siciliano",nameStyle:f,affiliations:[{text:u}],isCorresponding:c,notes:a,isAnonymous:c,fullName:"Bruno Siciliano",searchUrl:"\u002Fcore\u002Fsearch?filters%5BauthorTerms%5D=Bruno%20Siciliano&eventCode=SE-AU",orcidUrl:f}]},translators:{contributors:[],label:"Translated by"}},collections:[{link:"\u002Fcore\u002Fproduct\u002Fidentifier\u002FTHE_40TH_ANNIVERSARY_OF_ROBOTICA\u002Ftype\u002FBESPOKE_COLLECTION",id:"THE_40TH_ANNIVERSARY_OF_ROBOTICA",title:"The 40th Anniversary of Robotica"}],publishedDate:"20 March 2024",keywords:[{url:"\u002Fcore\u002Fsearch?filters[keywords]=aerial robotics",name:"aerial robotics"},{url:"\u002Fcore\u002Fsearch?filters[keywords]=control of robotic systems",name:"control of robotic systems"},{url:"\u002Fcore\u002Fsearch?filters[keywords]=legged robots",name:"legged 
robots"},{url:"\u002Fcore\u002Fsearch?filters[keywords]=non-prehensile manipulation",name:"non-prehensile manipulation"},{url:"\u002Fcore\u002Fsearch?filters[keywords]=surgical robots",name:"surgical robots"},{url:"\u002Fcore\u002Fsearch?filters[keywords]=teleoperation",name:"teleoperation"}],openPracticeBadges:[],doi:{url:"https:\u002F\u002Fdoi.org\u002F10.1017\u002FS026357472400033X",value:"10.1017\u002FS026357472400033X"},copyright:{statement:["© The Author(s), 2024. Published by Cambridge University Press"],holder:["The Author(s)"],year:[2024]},creativeCommons:f,acceptedManuscript:c,type:"review-article",pageRange:{range:"1 - 28",firstPage:E,lastPage:"28"},typeDescription:"Review Article",resultTypes:[],commentsCount:V,topicsAndSubtopics:f},journal:{id:aw,title:L,titleSlug:ax,mnemonic:"ROB",titleHistory:[],isFirstView:b,journalSlug:ax,isCompanion:c,parentCompanionJournalName:L,associatedParentCollection:f,paymentInfo:{prices:{"£":{price:26,sku:M,skuNew:F,currency:"£"},"€":{price:31,sku:M,skuNew:F,currency:"€"},US$:{price:36,sku:M,skuNew:F,currency:"US$"},AU$:{price:51,sku:M,skuNew:F,currency:"AU$"}}},url:ay,firstViewUrl:az,coverUrl:"https:\u002F\u002Fstatic.cambridge.org\u002Fcovers\u002FROB_0_0_0\u002Frobotica.jpg",submitMaterialsUrl:"\u002Fcore\u002Fjournals\u002Frobotica\u002Finformation\u002Fauthor-instructions\u002Fsubmitting-your-materials",hasHistory:c,latestTitle:L,latestId:aw,hasPastTitle:c},abstract:{textAbstracts:[{title:"Abstract",content:"\u003Cdiv class=\"abstract\" data-abstract-type=\"normal\"\u003E\u003Cp\u003EIn this article, we review the main results achieved by the research activities carried out at PRISMA Lab of the University of Naples Federico II where, for 35 years, an interdisciplinary team of experts developed robots that are ultimately useful to humans. 
We summarize the key contributions made in the last decade in the six research areas of dynamic manipulation and locomotion, aerial robotics, human-robot interaction, artificial intelligence and cognitive robotics, industrial robotics, and medical robotics. After a brief overview of each research field, the most significant methodologies and results are reported and discussed, highlighting their cross-disciplinary and translational aspects. Finally, the potential future research directions identified are discussed.\u003C\u002Fp\u003E\u003C\u002Fdiv\u003E",lang:aA}]},content:{html:"\u003Cdiv class=\"article review-article NLM\"\u003E\n\n\u003Cdiv class=\"body\"\u003E\n\u003Cdiv class=\"sec intro\" data-magellan-destination=\"s1\" id=\"s1\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E1.\u003C\u002Fspan\u003E Introduction\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E Developing robots that are ultimately useful and acceptable to humans has always been one of the major motivations for research in robotics. Potentially, robots can alleviate humans from performing dangerous jobs or working in hazardous conditions. They can handle lifting heavy weights, toxic substances, and repetitive tasks. Inspired by this, in labs and research centers across the world, interdisciplinary teams of experts coordinate their everyday efforts to pursue the goal of developing intelligent robotic systems that fulfill this scope. It is their duty and dream to push the boundary of robotics as a science, overcoming the current theoretical and technological limits, and making robots work closer to humans in our everyday living spaces. In this article, we review the main results achieved in this direction during the last decade by the robotics research carried out at PRISMA Lab of the University of Naples Federico II. The lab has been active in robotics research for 35 years now, and its team is internationally recognized in the community for its achievements. 
Given this long-standing expertise, the research work carried out at PRISMA Lab is tied to a solid basis and aims to bring groundbreaking results that have far-reaching impacts.\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f1\" id=\"f1\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig1.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4270\" height=\"3215\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig1.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 1.\u003C\u002Fspan\u003E Graphical representation of the six research areas and sub-areas dealt with within the PRISMA Lab at the University of Naples Federico II. This article proposes an overview of the main problems addressed in these fields and discuss potential future directions on the topics.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003Cp class=\"p\"\u003E Over the years, the team effort has been directed mainly toward six rapidly growing areas (and related sub-areas) of robotics that are dynamic manipulation and locomotion, aerial robotics, physical human-robot interaction (HRI), artificial intelligence (AI) and cognitive robotics, industrial robotics, and medical robotics (see Fig. \u003Ca class=\"xref fig\" href=\"#f1\"\u003E1\u003C\u002Fa\u003E). 
The six research areas listed above fulfill in different ways the primary scope of supporting humans in their daily activities. Advanced manipulation skills allow robots to naturally act in anthropic environments by exploiting available affordances that are typically designed for humans. In this context, dynamic and non-prehensile manipulation techniques allow robots to extend their manipulative capabilities as described in Sec. \u003Ca class=\"xref sec\" href=\"#s2\"\u003E2\u003C\u002Fa\u003E. Surprisingly, many methodologies used for non-prehensile manipulation also apply to legged robot locomotion. Motivated by this, the same section provides insights from recent legged robotics research. Aerial robots have been developed to perform tasks in high altitudes and difficult-to-access scenarios that cannot be easily reached or are too dangerous for human operators. To this end, the capability of interacting with the environment was recently integrated into control frameworks for aerial robots as can be seen in Sec. \u003Ca class=\"xref sec\" href=\"#s3\"\u003E3\u003C\u002Fa\u003E. Robots can support humans by substituting or by cooperating with them either proximally or remotely. In both cases, issues related to the interaction between a human and a robot may arise. As detailed in Sec. \u003Ca class=\"xref sec\" href=\"#s4\"\u003E4\u003C\u002Fa\u003E, physical HRI techniques must be considered to guarantee a safe and dependable behavior of collaborative robots (or cobots), for example, by designing suitable control schemes for reactive collision avoidance, compliance, and task-based interaction. In addition, in both human-robot cooperation and autonomous task execution, robots exhibiting cognitive capabilities are beneficial. 
We tackle the issue of deploying robots in dynamic and human-populated environments by integrating AI-based methods with cognitive control frameworks into robotic systems to allow flexible execution, planning, and monitoring of structured tasks as proposed in Sec. \u003Ca class=\"xref sec\" href=\"#s5\"\u003E5\u003C\u002Fa\u003E. The manipulation and AI methodologies were recently adopted in the field of industrial robotics by considering logistics as a main application as can be seen in Sec. \u003Ca class=\"xref sec\" href=\"#s6\"\u003E6\u003C\u002Fa\u003E. In this case, intelligent robotic systems are deployed to alleviate human operators from the execution of tedious and repetitive tasks. Differently, in the medical field, robots are directly conceived and programmed to extend human capabilities by performing super-precise surgical operations or acting as limb substitutes as described in Sec. \u003Ca class=\"xref sec\" href=\"#s7\"\u003E7\u003C\u002Fa\u003E.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E In the following sections, we report the main achievements in each of these areas, highlighting the adopted methodologies and the key contributions with respect to the state of the art on the topic. Finally, potential future research directions in each field are discussed in Sec. \u003Ca class=\"xref sec\" href=\"#s8\"\u003E8\u003C\u002Fa\u003E. 
Thus, the main contributions of this paper can be listed as follows:\u003C\u002Fp\u003E\u003Cul class=\"list nomark\"\u003E\n\u003Cli class=\"list-item\"\u003E\n\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003E•\u003C\u002Fspan\u003E We present a thorough review of the most recent work in the above-mentioned six research areas dealt with by the PRISMA Lab, highlighting the adopted methodologies and the key results achieved in the fields;\u003C\u002Fp\u003E\n\u003C\u002Fli\u003E\n\u003Cli class=\"list-item\"\u003E\n\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003E•\u003C\u002Fspan\u003E For each research area, we propose an overview of the field, reporting both seminal and state-of-the-art works, and identify potential future research directions on the topics.\u003C\u002Fp\u003E\n\u003C\u002Fli\u003E\n\u003C\u002Ful\u003E\n\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s2\" id=\"s2\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E2.\u003C\u002Fspan\u003E Dynamic manipulation and locomotion\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E The ways robots use to transport themselves or objects around share many similarities. Robots realize manipulation and locomotion tasks by physically establishing contacts and regulating the exchange of forces with the world around them [\u003Ca class=\"xref bibr\" href=\"#ref1\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Suomalainen, Karayiannidis and Kyrki\u003C\u002Fspan\u003E1\u003C\u002Fa\u003E]. With the technological advancements in both sensing and actuation speed, it is now possible to manipulate an object speedily and achieve stable locomotion across challenging terrains [\u003Ca class=\"xref bibr\" href=\"#ref2\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Yang, Zhang, Zeng, Agrawal and Sreenath\u003C\u002Fspan\u003E2\u003C\u002Fa\u003E]. 
In dynamic manipulation and locomotion, an important role is played by forces and accelerations, which are used together with kinematics, statics, and quasi-static forces to achieve the task. Dynamic non-prehensile manipulation of an object extends its feasible movements exploiting motion primitives such as rolling [\u003Ca class=\"xref bibr\" href=\"#ref3\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano\u003C\u002Fspan\u003E3\u003C\u002Fa\u003E], pushing [\u003Ca class=\"xref bibr\" href=\"#ref4\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Chai, Peng and Tsao\u003C\u002Fspan\u003E4\u003C\u002Fa\u003E], throwing, and tossing [\u003Ca class=\"xref bibr\" href=\"#ref5\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Satici, Ruggiero, Lippiello and Siciliano\u003C\u002Fspan\u003E5\u003C\u002Fa\u003E], that inherently use the dynamics of both the robot and the manipulated object [\u003Ca class=\"xref bibr\" href=\"#ref6\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Lippiello and Siciliano\u003C\u002Fspan\u003E6\u003C\u002Fa\u003E]. Non-prehensile manipulation, specifically juggling, exhibits connections with legged locomotion regarding the hybrid nature of the related dynamics, the zero-moment-point stability [\u003Ca class=\"xref bibr\" href=\"#ref7\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Sardain and Bessonnet\u003C\u002Fspan\u003E7\u003C\u002Fa\u003E], and the dynamic balancing conditions [\u003Ca class=\"xref bibr\" href=\"#ref8\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Farid, Siciliano and Ruggiero\u003C\u002Fspan\u003E8\u003C\u002Fa\u003E]. It was observed that the stability conditions for non-prehensile dynamic object manipulation and the support phase of a walking biped share the same set of equations. 
This fundamental concept can be leveraged to seamlessly transfer sensing, planning, and control frameworks developed for one field to the other. Among such control frameworks, energy-based control approaches can be exploited for both dynamic non-prehensile manipulation tasks and locomotion ones. The key role played by energy during biped locomotion was enlightened in passive-dynamic walking [\u003Ca class=\"xref bibr\" href=\"#ref9\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference McGeer\u003C\u002Fspan\u003E9\u003C\u002Fa\u003E]. Consequently, several control frameworks exploiting energy-related concepts were proposed through the years [\u003Ca class=\"xref bibr\" href=\"#ref10\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Holm and Spong\u003C\u002Fspan\u003E10\u003C\u002Fa\u003E–\u003Ca class=\"xref bibr\" href=\"#ref12\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Spong, Holm and Lee\u003C\u002Fspan\u003E12\u003C\u002Fa\u003E] to realize specific gaits with the sought features. Locomotion considered in the aforementioned papers occurs in ideal conditions, that is, in the absence of external forces acting on legs. On the other hand, the investigation of resilience to external disturbances has been a prominent focus over the years, encompassing both quadruped and biped robots. This emphasis stems from the crucial ability of legged robots to navigate challenging terrain, where the irregularity of the ground may result in an early impact of the foot, leading to external forces affecting the system [\u003Ca class=\"xref bibr\" href=\"#ref13\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Mao, Gao, Tian and Zhao\u003C\u002Fspan\u003E13\u003C\u002Fa\u003E]. 
A momentum-based observer detecting the anticipated foot touchdown was presented in [\u003Ca class=\"xref bibr\" href=\"#ref14\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bledt, Wensing, Ingersoll and Kim\u003C\u002Fspan\u003E14\u003C\u002Fa\u003E] while disturbances applied on the center of mass only were considered in [\u003Ca class=\"xref bibr\" href=\"#ref15\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Fahmi, Mastalli, Focchi and Semini\u003C\u002Fspan\u003E15\u003C\u002Fa\u003E], neglecting the presence of external forces acting on swing legs. While using an observer for external wrenches on the center of mass or stance feet can enhance locomotion on uneven terrains, it does not prevent the robot from falling after a significant impact on the swing leg. This collision results in a deviation of the foot from the planned motion, potentially causing the touchdown to occur far from the intended foothold. This, in turn, reduces the support polygon, destabilizing the robot. In severe cases, the swing leg might not touch the ground or collide with another leg, leading to a robot fall. Consequently, there is a need to estimate external forces acting on swing legs and compensate for these disturbances. 
In the following sections, we report an overview of the main achievements of the two research fields whereas Table \u003Ca class=\"xref table\" href=\"#tblI\"\u003EI\u003C\u002Fa\u003E provides a summary of the recent contributions related to these aspects.\u003C\u002Fp\u003E\u003Cdiv class=\"table-wrap\" data-magellan-destination=\"tblI\" id=\"tblI\"\u003E\n\n\u003Cdiv class=\"caption\"\u003E\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003ETable I.\u003C\u002Fspan\u003E Summary of PRISMA Lab contributions in the field of dynamic manipulation and locomotion.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cspan\u003E\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"956\" height=\"442\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fdiv\u003E\n\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s2-1\" id=\"s2-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E2.1.\u003C\u002Fspan\u003E Dynamic non-prehensile manipulation\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Manipulation pertains to making an intentional change in the environment or to objects that are being manipulated. When realized without completely restraining the object, manipulation is denoted as non-prehensile [\u003Ca class=\"xref bibr\" href=\"#ref6\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Lippiello and Siciliano\u003C\u002Fspan\u003E6\u003C\u002Fa\u003E]. 
The object is then subject to unilateral constraints and, in order to reach the goal, the dynamics both of the object and of the hand manipulating it, together with the related kinematics, static and quasi-static forces, must be exploited [\u003Ca class=\"xref bibr\" href=\"#ref6\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Lippiello and Siciliano\u003C\u002Fspan\u003E6\u003C\u002Fa\u003E]. The literature on the topic states that the conventional way to cope with a non-prehensile dynamic manipulation task is to split it into simpler subtasks, usually referred to as non-prehensile manipulation primitives, that is rolling, dynamic grasp, sliding, pushing, throwing, etc.\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f2\" id=\"f2\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig2.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4257\" height=\"1913\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig2.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 2.\u003C\u002Fspan\u003E Tray-based and pushing non-prehensile object manipulation scenarios. 
Upper row: a robot is tasked with transporting an object placed on a tray-like end-effector along a predefined, fast trajectory while avoiding the relative sliding (a) [\u003Ca class=\"xref bibr\" href=\"#ref20\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Garg, Ruggiero, Oriolo and Siciliano\u003C\u002Fspan\u003E20\u003C\u002Fa\u003E]. The robot performs a linear transporting trajectory while opportunely inclining the tray to improve the robustness of the task performance (b) [\u003Ca class=\"xref bibr\" href=\"#ref19\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano\u003C\u002Fspan\u003E19\u003C\u002Fa\u003E]. Bottom row: an object is pushed by a mobile robot along a trajectory (c) [\u003Ca class=\"xref bibr\" href=\"#ref16\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bertoncelli, Ruggiero and Sabattini\u003C\u002Fspan\u003E16\u003C\u002Fa\u003E]. Multiple robots can push an object with minimal effort by optimally placing themself around it (d) [\u003Ca class=\"xref bibr\" href=\"#ref17\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bertoncelli, Selvaggio, Ruggiero and Sabattini\u003C\u002Fspan\u003E17\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003Cp class=\"p\"\u003E Seminal works carried out in this direction investigate the non-prehensile rolling manipulation problem, where a single object rolls on the surface of a controlled manipulator. In [\u003Ca class=\"xref bibr\" href=\"#ref26\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ryu, Ruggiero and Lynch\u003C\u002Fspan\u003E26\u003C\u002Fa\u003E], backstepping was used to derive a control technique to stabilize a disk-on-disk rolling manipulation system. The goal was to stabilize by controlling a circular object on the top of a circular hand in the vertical plane. 
The effect of shapes in the input-state linearization of the considered non-prehensile planar rolling dynamic manipulation systems was later investigated in [\u003Ca class=\"xref bibr\" href=\"#ref40\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lippiello, Ruggiero and Siciliano\u003C\u002Fspan\u003E40\u003C\u002Fa\u003E]. Given the shapes of both the object and the manipulator, a state transformation was found allowing the possibility to exploit linear controls to stabilize the system.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E In tray-based non-prehensile manipulation (see Fig. \u003Ca class=\"xref fig\" href=\"#f2\"\u003E2\u003C\u002Fa\u003E – upper row), the tasks of interest for the robotic system are opposite: (1) reconfigure objects in the hand by allowing them to intentionally slide or roll in the right direction; (2) transport objects placed on the tray while preventing them from sliding and falling. In the first case, the pose reconfiguration of a spherical object rolling on a tray-shaped hand, which is in turn actuated by a robot manipulator, was investigated in [\u003Ca class=\"xref bibr\" href=\"#ref3\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano\u003C\u002Fspan\u003E3\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref27\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Serra, Ferguson, Ruggiero, Siniscalco, Petit, Lippiello and Siciliano\u003C\u002Fspan\u003E27\u003C\u002Fa\u003E]: the control law is derived following an interconnection-and-damping-assignment passivity-based approach using a port-Hamiltonian (pH) dynamic model of the system. Full pose regulation of the sphere was achieved thanks to a purposely developed planner. In the second case, the objective is to prevent objects’ sliding induced by inertial forces while carrying the object from one place to another. 
Adaptive tray orientation was shown to help achieve higher linear accelerations during the tracking of a fast trajectory, minimizing the occurrence of object slipping. The idea behind this is to let the tray surface completely counteract the net force acting on the object. A quadratic program was used to compute the optimal robot manipulator torque control input to enforce non-sliding conditions for the object with adaptive tray orientation while also considering the system’s kinematic and dynamic constraints in [\u003Ca class=\"xref bibr\" href=\"#ref21\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Subburaman, Selvaggio and Ruggiero\u003C\u002Fspan\u003E21\u003C\u002Fa\u003E]. Instead, keeping the tray in the upright configuration, a jerk-based model predictive non-sliding manipulation control was proposed in [\u003Ca class=\"xref bibr\" href=\"#ref20\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Garg, Ruggiero, Oriolo and Siciliano\u003C\u002Fspan\u003E20\u003C\u002Fa\u003E] for the same task showing superior performance: considering the rate-of-change of the joint torque as the output of the controller, a smooth torque control profile is obtained while allowing direct control of the contact forces. Tray-based non-prehensile manipulation was recently used to develop a shared control teleoperation framework for users to safely transport objects using a remotely located robot [\u003Ca class=\"xref bibr\" href=\"#ref19\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano\u003C\u002Fspan\u003E19\u003C\u002Fa\u003E]. The proposed shared control approach shapes the motion commands imparted by the user to the remote robot and automatically regulates the end-effector orientation to more robustly prevent the object from sliding over the tray. 
Tray-based non-prehensile manipulation with a mobile manipulator dynamically balancing objects on its end-effector without grasping them was presented in [\u003Ca class=\"xref bibr\" href=\"#ref41\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Heins and Schoellig\u003C\u002Fspan\u003E41\u003C\u002Fa\u003E]. A whole-body constrained model predictive controller for a mobile manipulator that balances objects and avoids collisions was developed for the considered task. More recently, researchers have focused on fast slosh-free fluid transportation [\u003Ca class=\"xref bibr\" href=\"#ref42\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Muchacho, Laha, Figueredo and Haddadin\u003C\u002Fspan\u003E42\u003C\u002Fa\u003E]. Here the goal was to generate slosh-free trajectories by controlling the pendulum model of the liquid surface with constrained quadratic program optimization to obtain valid control inputs. This online technique allowed the motion generator to be used for real-time non-prehensile slosh-free teleoperation of liquids [\u003Ca class=\"xref bibr\" href=\"#ref43\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Muchacho, Bien, Laha, Naceri, Figueredo and Haddadin\u003C\u002Fspan\u003E43\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E In those cases in which the object is too heavy or too large to be grasped, pushing an object is a simple solution widely adopted by humans, and the same concept can be thus transferred to robots (see Fig. \u003Ca class=\"xref fig\" href=\"#f2\"\u003E2\u003C\u002Fa\u003E – bottom row). A technique to manipulate an object with a non-holonomic mobile robot using the pushing non-prehensile manipulation primitive was presented in [\u003Ca class=\"xref bibr\" href=\"#ref16\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bertoncelli, Ruggiero and Sabattini\u003C\u002Fspan\u003E16\u003C\u002Fa\u003E]. 
Such a primitive involves unilateral constraints associated with the friction between the robot and the manipulated object. Violating this constraint produces the slippage of the object during the manipulation. A linear time-varying model predictive control was designed to properly include the unilateral constraint within the control action. The framework can be extended in the case of multi-robots: a task-oriented contact placement optimization strategy for object pushing that allows calculating optimal contact points minimizing the amplitude of forces required to execute the task was presented in [\u003Ca class=\"xref bibr\" href=\"#ref17\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bertoncelli, Selvaggio, Ruggiero and Sabattini\u003C\u002Fspan\u003E17\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Many of the proposed methods handle flat objects with primitive geometric shapes moving quasi-statically on high-friction surfaces, yet they usually make use of complex analytical models or utilize specialized physics engines to predict the outcomes of various interactions. On the other hand, an experience-based approach, which does not require any explicit analytical model or the help of a physics engine was proposed in [\u003Ca class=\"xref bibr\" href=\"#ref44\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Meriçli, Veloso and Akın\u003C\u002Fspan\u003E44\u003C\u002Fa\u003E] where a mobile robot simply experiments with pushable complex 3D real-world objects to observe and memorize their motion characteristics together with the associated motion uncertainties resulting from varying initial caster wheel orientations and potential contacts between the robot and the object. 
A probabilistic method for autonomous learning of an approximate dynamics model for these objects was presented in [\u003Ca class=\"xref bibr\" href=\"#ref45\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Novin, Yazdani, Merryweather and Hermans\u003C\u002Fspan\u003E45\u003C\u002Fa\u003E]. In this method, the dynamic parameters were learned using a small dataset consisting of force and motion data from interactions between the robot and objects. Based on these concepts, a rearrangement algorithm that relies on only a few known straight-line pushes for some novel object and requires no analytical models, force sensors, or large training datasets was proposed in [\u003Ca class=\"xref bibr\" href=\"#ref4\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Chai, Peng and Tsao\u003C\u002Fspan\u003E4\u003C\u002Fa\u003E]. The authors experimentally verified the performance of their algorithm by rearranging several types of objects by pushing them to any target planar pose.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Research on other non-prehensile manipulation primitives further includes sliding (for pizza-baking applications) [\u003Ca class=\"xref bibr\" href=\"#ref28\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Gutiérrez-Giles, Ruggiero, Lippiello and Siciliano\u003C\u002Fspan\u003E28\u003C\u002Fa\u003E], throwing [\u003Ca class=\"xref bibr\" href=\"#ref5\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Satici, Ruggiero, Lippiello and Siciliano\u003C\u002Fspan\u003E5\u003C\u002Fa\u003E], stretching a deformable object [\u003Ca class=\"xref bibr\" href=\"#ref29\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Kim, Ruggiero, Lippiello, Siciliano, Siciliano and Ruggiero\u003C\u002Fspan\u003E29\u003C\u002Fa\u003E], and related ones [\u003Ca class=\"xref bibr\" href=\"#ref30\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Kim, Gutiérrez-Giles, Satici, Donaire, Cacace, Buonocore, Fontanelli, Lippiello, Siciliano, Gusikhin 
and Madani\u003C\u002Fspan\u003E30\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref31\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Petit, Serra, Satici, Cacace, Donaire, Ficuciello, Buonocore, Fontanelli, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E31\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s2-2\" id=\"s2-2\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E2.2.\u003C\u002Fspan\u003E Legged robotics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Motivated by the connection between bipedal locomotion and non-prehensile manipulation [\u003Ca class=\"xref bibr\" href=\"#ref8\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Farid, Siciliano and Ruggiero\u003C\u002Fspan\u003E8\u003C\u002Fa\u003E], the methodology proposed initially in [\u003Ca class=\"xref bibr\" href=\"#ref3\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano\u003C\u002Fspan\u003E3\u003C\u002Fa\u003E] to achieve the stabilization of non-prehensile planar rolling manipulation tasks was subsequently extended to tackle the gait-generation problem of a simple \u003Cem class=\"italic\"\u003Ecompass-like biped robot\u003C\u002Fem\u003E in [\u003Ca class=\"xref bibr\" href=\"#ref34\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Ruggiero and Lippiello\u003C\u002Fspan\u003E34\u003C\u002Fa\u003E]. The common control framework is based on a modification of the well-known \u003Cem class=\"italic\"\u003Einterconnection-and-damping-assignment passivity-based control\u003C\u002Fem\u003E (IDA-PBC) of pH systems, where an appropriate parameterization of the inertia matrix was proposed to avoid the explicit solution of the matching partial differential equations (PDEs) arising during control synthesis. 
Due to the critical role played by energy exchange during walking, the methodology was profitably applied to passive-dynamic walking. Thanks to the novel control strategy, new gaits were generated, which are manifestly different from the passive gait. The result was a controlled planar walker moving manifestly slower or faster (depending on control tuning) than the open-loop system while preserving the system’s passivity due to the closed-loop pH structure.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E An alternative constructive methodology, improving some issues present in [\u003Ca class=\"xref bibr\" href=\"#ref3\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Serra, Ruggiero, Donaire, Buonocore, Lippiello and Siciliano\u003C\u002Fspan\u003E3\u003C\u002Fa\u003E], was proposed in [\u003Ca class=\"xref bibr\" href=\"#ref35\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Ruggiero and Lippiello\u003C\u002Fspan\u003E35\u003C\u002Fa\u003E]. In line with the same problem, the effect of dissipative forces deployed in the controller on gait generation was investigated in [\u003Ca class=\"xref bibr\" href=\"#ref36\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Nacusse, Arpenti, Ruggiero and Lippiello\u003C\u002Fspan\u003E36\u003C\u002Fa\u003E]. There, two alternative control methodologies exploiting dissipative forces, termed \u003Cem class=\"italic\"\u003Esimultaneous interconnection-and-damping-assignment passivity-based control\u003C\u002Fem\u003E (SIDA-PBC) and \u003Cem class=\"italic\"\u003Eenergy pumping-and-damping passivity-based control\u003C\u002Fem\u003E (EPD-PBC), respectively, demonstrated better results in achieving slow gaits, characterized by small step lengths and large step periods, compared to the performance of the IDA-PBC. 
SIDA-PBC carries out the energy shaping and the damping injection simultaneously, thanks to dissipative forces in the desired dynamics, differently from IDA-PBC, where these two control actions are carried out in two distinct steps. On the other hand, EPD-PBC proved to be an efficient control strategy to face another control task belonging to the realm of legged locomotion, namely the \u003Cem class=\"italic\"\u003Egait robustification\u003C\u002Fem\u003E problem, that is, the enlargement of the basin of attraction of the limit cycle associated with the natural passive gait of the compass-like biped [\u003Ca class=\"xref bibr\" href=\"#ref32\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Donaire, Ruggiero and Lippiello\u003C\u002Fspan\u003E32\u003C\u002Fa\u003E]. This was achieved by alternating energy injection and dissipation into\u002Ffrom the system to stabilize the walker at the target energy value corresponding to the natural gait. Moreover, the EPD-PBC methodology was also used with the IDA-PBC approach, showing that not only the natural passive gaits but also the gaits generated through energy shaping can be robustified using the proposed design [\u003Ca class=\"xref bibr\" href=\"#ref32\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Donaire, Ruggiero and Lippiello\u003C\u002Fspan\u003E32\u003C\u002Fa\u003E]. This work was carried out within the \u003Cem class=\"italic\"\u003Ehybrid zero dynamics\u003C\u002Fem\u003E (HZD) framework which also served as a starting point for the development of a tracking controller based on IDA-PBC able to guarantee the exponentially fast convergence of suitably defined output dynamics to the HZD manifold [\u003Ca class=\"xref bibr\" href=\"#ref33\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Donaire, Ruggiero and Lippiello\u003C\u002Fspan\u003E33\u003C\u002Fa\u003E]. 
The proposed strategy conferred robustness concerning parametric uncertainties to the closed-loop system by assigning desired error dynamics described through the pH formalism, thus preserving passivity.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E On the quadrupedal locomotion side, an estimator of external disturbances independently acting on stance and swing legs was proposed in [\u003Ca class=\"xref bibr\" href=\"#ref39\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Morlando, Teimoorzadeh and Ruggiero\u003C\u002Fspan\u003E39\u003C\u002Fa\u003E]. Based on the system’s momentum, the estimator was leveraged along with a suitable motion planner for the trajectory of the robot’s center of mass and an optimization problem based on the modulation of ground reaction forces in a whole-body control strategy. Such a control architecture allows the locomotion of a legged robot inside an unstructured environment where collisions could happen and where irregularities in the terrain cause disturbances on legs. When significant forces act on both the center of mass and the robot’s legs, momentum-based observers are insufficient. Therefore, the work in [\u003Ca class=\"xref bibr\" href=\"#ref38\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Morlando and Ruggiero\u003C\u002Fspan\u003E38\u003C\u002Fa\u003E] proposed a “hybrid” observer, an estimator that combines a momentum-based observer for the angular term and an acceleration-based observer for the translational one, employing directly measurable values from the sensors. An approach based on two observers was also proposed in [\u003Ca class=\"xref bibr\" href=\"#ref37\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Morlando, Lippiello and Ruggiero\u003C\u002Fspan\u003E37\u003C\u002Fa\u003E], where a framework to control a quadruped robot tethered to a visually impaired person was presented, as illustrated in Fig. \u003Ca class=\"xref fig\" href=\"#f3\"\u003E3\u003C\u002Fa\u003E (left). 
Finally, in [\u003Ca class=\"xref bibr\" href=\"#ref18\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Morlando, Selvaggio and Ruggiero\u003C\u002Fspan\u003E18\u003C\u002Fa\u003E], the problem of non-prehensile object transportation through a legged manipulator is faced, arriving at a perfect combination of the topics seen in this section. An alternative whole-body control architecture was devised to prevent the sliding of the object placed on the tray at the manipulator’s end-effector while retaining the quadruped robot balance during walking, as shown in Fig. \u003Ca class=\"xref fig\" href=\"#f3\"\u003E3\u003C\u002Fa\u003E (right). Both contact forces between the tray and the object and between the legs and the ground were kept within their respective friction cones by solving a quadratic optimization problem while achieving the sought transportation task.\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f3\" id=\"f3\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig3.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4250\" height=\"1385\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig3.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 3.\u003C\u002Fspan\u003E On the left, a quadruped robot is connected to a human through a leash. This scenario was tested in the Gazebo simulation environment emulating a guide dog helping a visually impaired person. 
In the middle, a legged manipulator transports an object placed on a tray-like end-effector while simultaneously preventing it from sliding. On the right, the model behind this task, where the object (red cube) is prevented from sliding by keeping contact forces (blue) inside the friction cones (green).\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s3\" id=\"s3\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E3.\u003C\u002Fspan\u003E Aerial robotics\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E Aerial robotics has been consolidated in the last decade as a research topic of interest for modeling and control, perception, planning, manipulation, and design. As such, it constitutes an effective technological solution for various applications such as inspection and maintenance, search and rescue, transportation and delivery, monitoring and patrolling, or 3D mapping. The maturity level reached in this field has led to the rise of several applications of aerial robots, with a focus on high altitude and challenging access scenarios that human operators cannot easily reach. The time, risk, and cost associated with conventional solutions involving the deployment of heavy vehicles and infrastructures motivate the development of aerial robots capable of quickly reaching these workspaces and performing visual or contact inspection operations. The research community faced two main problems during the deployment of reliable autonomous aerial robots. Firstly, conventional Vertical Takeoff and Landing (VToL) devices, like multirotor Unmanned Aerial Vehicles (UAVs) with parallel axes, faced challenges due to underactuation, impacting stabilization and trajectory tracking. 
Commonly, a hierarchical controller [\u003Ca class=\"xref bibr\" href=\"#ref46\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Mahony and Hamel\u003C\u002Fspan\u003E46\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref47\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Nonami, Kendoul, Suzuki and Wang\u003C\u002Fspan\u003E47\u003C\u002Fa\u003E] addresses this with time-scale separation between linear and angular dynamics. Position and yaw angle of VToL UAVs are flat outputs [\u003Ca class=\"xref bibr\" href=\"#ref48\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Spica, Franchi, Oriolo, Bülthoff and Giordano\u003C\u002Fspan\u003E48\u003C\u002Fa\u003E], allowing trajectory tracking and solving the underactuated problem. Secondly, as UAV aerodynamic models are complex, these require robust control designs. Most designs incorporated integral action to handle disturbances and cope with uncertainties (e.g., battery level). Adaptive controls [\u003Ca class=\"xref bibr\" href=\"#ref49\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Antonelli, Cataldi, Giordano, Chiaverini and Franchi\u003C\u002Fspan\u003E49\u003C\u002Fa\u003E–\u003Ca class=\"xref bibr\" href=\"#ref51\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Roberts and Tayebi\u003C\u002Fspan\u003E51\u003C\u002Fa\u003E], force observers [\u003Ca class=\"xref bibr\" href=\"#ref52\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Yüksel, Secchi, Bülthoff and Franchi\u003C\u002Fspan\u003E52\u003C\u002Fa\u003E], and passivity-based controllers [\u003Ca class=\"xref bibr\" href=\"#ref53\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Egeland and Godhavn\u003C\u002Fspan\u003E53\u003C\u002Fa\u003E] enhanced robustness. 
PH methods [\u003Ca class=\"xref bibr\" href=\"#ref52\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Yüksel, Secchi, Bülthoff and Franchi\u003C\u002Fspan\u003E52\u003C\u002Fa\u003E] and passive backstepping [\u003Ca class=\"xref bibr\" href=\"#ref54\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ha, Zuo, Choi and Lee\u003C\u002Fspan\u003E54\u003C\u002Fa\u003E] were explored for improved control. For further exploration, comprehensive literature reviews can be found in [\u003Ca class=\"xref bibr\" href=\"#ref55\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Valvanis\u003C\u002Fspan\u003E55\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref56\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Valvanis and Vachtsevanos\u003C\u002Fspan\u003E56\u003C\u002Fa\u003E] among the others.\u003C\u002Fp\u003E\u003Cdiv class=\"table-wrap\" data-magellan-destination=\"tblII\" id=\"tblII\"\u003E\n\n\u003Cdiv class=\"caption\"\u003E\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003ETable II.\u003C\u002Fspan\u003E Summary of PRISMA Lab contributions in the field of aerial robotics.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cspan\u003E\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"674\" height=\"430\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fdiv\u003E\n\n\u003Cp class=\"p\"\u003E Nowadays, the goal is the development of a new generation of flying service robots capable of supporting human beings in 
all those activities requiring the ability to interact actively and safely in the air. Challenging fields include inspecting buildings and large infrastructures, sample picking, and remote aerial manipulation. The latter is intended as the grasping, transporting, positioning, assembly and disassembly of mechanical parts, measurement instruments, and any objects performed with aerial vehicles. Indeed, UAVs are currently migrating from passive tasks like inspection, surveillance, monitoring, remote sensing, and so on, to active tasks like grasping and manipulation. UAVs must have the proper tools to accomplish manipulation tasks in the air. The two most adopted solutions are either to mount a gripper or a multi-fingered hand directly on the aerial vehicle, for example, a flying hand, or to equip the UAV with one or more robotic arms, for example, an unmanned aerial manipulator (UAM) as shown in Fig. \u003Ca class=\"xref fig\" href=\"#f4\"\u003E4\u003C\u002Fa\u003E. The UAM could be an efficient solution providing an aerial vehicle capable of performing dexterous manipulation tasks. Surveys regarding aerial manipulation can be found in refs. [\u003Ca class=\"xref bibr\" href=\"#ref57\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Oller, Tognon, Suarez, Lee and Franchi\u003C\u002Fspan\u003E57\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref58\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Lippiello and Ollero\u003C\u002Fspan\u003E58\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E In the following sections, an overview of the work carried out in aerial vehicle control and aerial manipulation is revised. Table \u003Ca class=\"xref table\" href=\"#tblII\"\u003EII\u003C\u002Fa\u003E provides a summary of the recent contributions related to these aspects. 
\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f4\" id=\"f4\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig4.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4250\" height=\"1126\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig4.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 4.\u003C\u002Fspan\u003E Two unmanned aerial manipulators during non-destructive test measurements. On the left, an aerial vehicle equipped with one arm is measuring the thickness of a wall with an ultrasonic probe. On the right, a hybrid drone equipped with a specially developed omnidirectional mobile base that can land on pipelines and then move to position ad-hoc measurement systems for non-destructive test measures.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s3-1\" id=\"s3-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E3.1.\u003C\u002Fspan\u003E Control of aerial vehicles\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Model-based control of VToL UAVs leverages many simplifications by neglecting several aerodynamic effects whose presence affects the performance of tracking and regulation control problems. 
Therefore, researchers always seek robustification techniques to improve related problems.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E An estimator of unmodeled dynamics and external wrench acting on the VToL UAV and based on the system’s momentum was employed in [\u003Ca class=\"xref bibr\" href=\"#ref59\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Cacace, Sadeghian and Lippiello\u003C\u002Fspan\u003E59\u003C\u002Fa\u003E] to compensate for such disturbances. This estimator can be inserted in standard hierarchical controllers commanding UAVs with a flat propeller configuration. Another estimator, based on a robust extended-state observer, was designed in [\u003Ca class=\"xref bibr\" href=\"#ref60\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Sotos, Cacace, Ruggiero and Lippiello\u003C\u002Fspan\u003E60\u003C\u002Fa\u003E]. In this case, a UAV with passively tilted propellers was considered. In the case of a UAV with actively tilted propellers, instead, a robust controller is devised in [\u003Ca class=\"xref bibr\" href=\"#ref61\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Sotos, Ruggiero and Lippiello\u003C\u002Fspan\u003E61\u003C\u002Fa\u003E]. The proposed technique is model-free and based on a hyperbolic controller globally attracting the error signals to an ultimate bound about the origin despite external disturbances.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E In the case of a quadrotor, the loss or damage of one propeller can be dramatic for the aerial vehicle’s stable flight. The techniques developed in refs. [\u003Ca class=\"xref bibr\" href=\"#ref62\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lippiello, Ruggiero and Serra\u003C\u002Fspan\u003E62\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref63\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lippiello, Ruggiero and Serra\u003C\u002Fspan\u003E63\u003C\u002Fa\u003E] can be employed to perform an emergency landing. 
While both are supposed to turn off the propeller as opposed to the damaged one, resulting in a bi-rotor configuration in which the yaw is uncontrolled, the former considers a PID approach, while the latter a backstepping approach to track the emergency landing trajectory in the Cartesian space.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s3-2\" id=\"s3-2\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E3.2.\u003C\u002Fspan\u003E Aerial manipulation\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Four elements mainly constitute a UAM: \u003Cspan data-mathjax-status=\"alt-graphic\" class=\"inline-formula\"\u003E\n\u003Cspan class=\"alternatives\"\u003E\n\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline1.png?pub-status=live\" class=\"aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off\" width=\"10\" height=\"16\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline1.png\" data-zoomable=\"false\"\u003E\n\u003Cspan class=\"mathjax-tex-wrapper\" data-mathjax-type=\"texmath\"\u003E\u003Cspan class=\"tex-math mathjax-tex-math mathjax-on\"\u003E\n$i)$\n\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E the UAV floating base; \u003Cspan data-mathjax-status=\"alt-graphic\" class=\"inline-formula\"\u003E\n\u003Cspan class=\"alternatives\"\u003E\n\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" 
data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline2.png?pub-status=live\" class=\"aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off\" width=\"15\" height=\"16\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline2.png\" data-zoomable=\"false\"\u003E\n\u003Cspan class=\"mathjax-tex-wrapper\" data-mathjax-type=\"texmath\"\u003E\u003Cspan class=\"tex-math mathjax-tex-math mathjax-on\"\u003E\n$ii)$\n\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E the robotic arm(s); \u003Cspan data-mathjax-status=\"alt-graphic\" class=\"inline-formula\"\u003E\n\u003Cspan class=\"alternatives\"\u003E\n\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline3.png?pub-status=live\" class=\"aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off\" width=\"20\" height=\"16\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline3.png\" data-zoomable=\"false\"\u003E\n\u003Cspan class=\"mathjax-tex-wrapper\" data-mathjax-type=\"texmath\"\u003E\u003Cspan class=\"tex-math mathjax-tex-math mathjax-on\"\u003E\n$iii)$\n\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E the gripper(s) or multi-fingered hand(s) attached at the end-effector of the arm(s); iv) the necessary sensory system. 
During the flight, the mounted robot arm provides even more issues since its dynamics depend on the actual configuration state of the whole system. There are two approaches to addressing planning and control problems for a UAM. The former is a “centralized” approach in which the UAV and the robotic arm are considered a unique entity. Thus the planning and the controller are designed from the complete kinematic and dynamic models. The latter approach considers the UAV and the robotic arm as separate independent systems. The effects of the arm on the aerial vehicle can be then considered external disturbances and vice versa [\u003Ca class=\"xref bibr\" href=\"#ref64\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference D’Ago, Selvaggio, Suarez, Gañán, Buonocore, Di Castro, Lippiello, Ollero and Ruggiero\u003C\u002Fspan\u003E64\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref65\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ruggiero, Trujillo, Cano, Ascorbe, Viguria, Peréz, Lippiello, Ollero and Siciliano\u003C\u002Fspan\u003E65\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Aerial manipulation is now almost a reality in inspection and maintenance applications, particularly non-destructive test (NDT) measurements (see Fig. \u003Ca class=\"xref fig\" href=\"#f4\"\u003E4\u003C\u002Fa\u003E). In this scenario, ultrasonic probes are used to retrieve the wall thickness of a surface to prove the integrity of the material without compromising its internal structure. These tests are performed by placing the inspection probe in fixed contact with the surface under examination. Currently, NDT measurements are performed by humans who must climb a high scaffolding to reach the inspection location with the use of tools like man-lifts, cranes, or rope-access systems. Therefore, improving NDT inspection operations is fundamental to raising human safety and decreasing the economic costs of inspection procedures. The platforms presented in refs. 
[\u003Ca class=\"xref bibr\" href=\"#ref66\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cacace, Fontanelli and Lippiello\u003C\u002Fspan\u003E66\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref67\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cacace, Silva, Fontanelli and Lippiello\u003C\u002Fspan\u003E67\u003C\u002Fa\u003E] are possible solutions to address NDT measurements in challenging plants. There, a robotic arm was used for pipe inspection. Besides this, UAMs can interact with humans and help them in daily activities, becoming efficient aerial coworkers, particularly for working at height in inspection and maintenance activities that still require human intervention. Therefore, as long as the application range of drones increases, the possibility of sharing the human workspace also increases. Hence, it becomes paramount to understand how the interaction between humans and drones is established. The work in [\u003Ca class=\"xref bibr\" href=\"#ref68\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cuniato, Cacace, Selvaggio, Ruggiero and Lippiello\u003C\u002Fspan\u003E68\u003C\u002Fa\u003E] went in this direction thanks to implementing a hardware-in-the-loop simulator for human cooperation with an aerial manipulator. The simulator provided the user with realistic haptic feedback for a human-aerial manipulator interaction activity. The forces exchanged between the hardware interface and the human\u002Fenvironment were measured and supplied to a dynamically simulated aerial manipulator. In turn, the simulated aerial platform fed back its position to the hardware allowing the human to feel and evaluate the interaction effects. 
Besides human-aerial manipulator cooperation, the simulator contributed to developing and testing autonomous control strategies in aerial manipulation.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Autonomous aerial manipulation tasks can be accomplished also thanks to the use of exteroceptive sensing for an image-based visual impedance control that allows realizing physical interaction of a dual-arm UAM equipped with a camera and a force\u002Ftorque sensor [\u003Ca class=\"xref bibr\" href=\"#ref69\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lippiello, Fontanelli and Ruggiero\u003C\u002Fspan\u003E69\u003C\u002Fa\u003E]. The design of a hierarchical task-composition framework for controlling a UAM, which integrates the main benefits of both image-based and position-based control schemes into a unified hybrid-control framework, was presented in [\u003Ca class=\"xref bibr\" href=\"#ref25\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lippiello, Cacace, Santamaria-Navarro, Andrade-Cetto, Trujillo, Esteves and Viguria\u003C\u002Fspan\u003E25\u003C\u002Fa\u003E]. Aerial manipulation tasks enabled by the proposed methods include the autonomous installation of clip bird diverters on high-voltage lines through a drone equipped with a sensorized stick to realize a compliant interaction with the environment [\u003Ca class=\"xref bibr\" href=\"#ref70\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference D’Angelo, Pagano, Ruggiero and Lippiello\u003C\u002Fspan\u003E70\u003C\u002Fa\u003E]. 
Besides enabling safer human operations, such applications realize the huge impact of reducing collisions with wires by \u003Cspan data-mathjax-status=\"alt-graphic\" class=\"inline-formula\"\u003E\n\u003Cspan class=\"alternatives\"\u003E\n\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline4.png?pub-status=live\" class=\"aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off\" width=\"16\" height=\"13\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline4.png\" data-zoomable=\"false\"\u003E\n\u003Cspan class=\"mathjax-tex-wrapper\" data-mathjax-type=\"texmath\"\u003E\u003Cspan class=\"tex-math mathjax-tex-math mathjax-on\"\u003E\n$50$\n\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E to \u003Cspan data-mathjax-status=\"alt-graphic\" class=\"inline-formula\"\u003E\n\u003Cspan class=\"alternatives\"\u003E\n\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline5.png?pub-status=live\" class=\"aop-lazy-load-image mathjax-alternative mathjax-alt-graphic mathjax-off\" width=\"29\" height=\"13\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_inline5.png\" data-zoomable=\"false\"\u003E\n\u003Cspan class=\"mathjax-tex-wrapper\" data-mathjax-type=\"texmath\"\u003E\u003Cspan class=\"tex-math mathjax-tex-math 
mathjax-on\"\u003E\n$90\\%$\n\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fspan\u003E, saving tens of thousands of birds’ lives during their migrations.\u003C\u002Fp\u003E\u003Cdiv class=\"table-wrap\" data-magellan-destination=\"tblIII\" id=\"tblIII\"\u003E\n\n\u003Cdiv class=\"caption\"\u003E\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003ETable III.\u003C\u002Fspan\u003E Summary of PRISMA Lab contributions in the field of physical human-robot interaction.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cspan\u003E\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"673\" height=\"388\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fdiv\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s4\" id=\"s4\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E4.\u003C\u002Fspan\u003E Physical human-robot interaction\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E Performing physical actions, robots can help humans in their jobs and daily lives [\u003Ca class=\"xref bibr\" href=\"#ref71\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Cognetti, Nikolaidis, Ivaldi and Siciliano\u003C\u002Fspan\u003E71\u003C\u002Fa\u003E]. This is useful in several applications ranging from physical assistance to disabled or elderly people to reduction of risks and fatigue at work. 
However, an intuitive, safe, and reliable interaction must be established for the robot to become an ideal proximal or remote assistant\u002Fcollaborator. In the following sections, we are going to review recent work in this direction. Table \u003Ca class=\"xref table\" href=\"#tblIII\"\u003EIII\u003C\u002Fa\u003E provides a summary of the recent contributions in this field.\u003C\u002Fp\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s4-1\" id=\"s4-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E4.1.\u003C\u002Fspan\u003E Proximal collaborative execution of structured tasks\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E While collaborative robotic platforms ensuring safe and compliant physical HRI are spreading in service robotics applications, the collaborative execution of structured collaborative tasks still poses relevant research challenges [\u003Ca class=\"xref bibr\" href=\"#ref72\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Johannsmeier and Haddadin\u003C\u002Fspan\u003E72\u003C\u002Fa\u003E]. An effective and fluent human-robot collaboration during the execution of structured activities should support both cognitive and physical interaction. In these settings, operators and robots continuously estimate their reciprocal intentions to decide whether to commit to shared activities, when to switch towards a different task, or how to regulate compliant interactions during co-manipulation operations. In refs. 
[\u003Ca class=\"xref bibr\" href=\"#ref73\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cacace, Caccavale, Finzi and Grieco\u003C\u002Fspan\u003E73\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref74\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cacace, Caccavale, Finzi and Lippiello\u003C\u002Fspan\u003E74\u003C\u002Fa\u003E], we addressed these issues by proposing a human-robot collaborative framework which seamlessly integrates task monitoring, task orchestration, and task-situated interpretation of the human physical guidance (see Fig. \u003Ca class=\"xref fig\" href=\"#f5\"\u003E5\u003C\u002Fa\u003E (e)) during the joint execution of hierarchically structured manipulation activities. In this setting, task orchestration and adaptation occur simultaneously with the interpretation of the human interventions. Depending on the assigned tasks, the supervisory framework enables potential subtasks, targets, and trajectories, while the human guidance is monitored by LSTM networks that classify the physical interventions of the operator. When the human guidance is assessed as aligned with the planned activities, the robotic system can keep executing the current activities, while suitably adjusting subtasks, targets, or motion trajectories following the corrections provided by the operator. 
Within this collaborative framework, different modalities of human-robot collaboration (human-guided, task-guided, balanced) were explored and assessed in terms of their effectiveness and user experience during the interaction.\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f5\" id=\"f5\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig5.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4255\" height=\"1611\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig5.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 5.\u003C\u002Fspan\u003E (a) cognitive control framework compatible with AI methods for planning, reasoning, and learning; (b) task orchestration and situated interpretation of ambiguous human gestures; (c) kinesthetic teaching of structured tasks; combined task and motion plans (d); human-robot collaboration during the execution of a shared task (e).\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s4-2\" id=\"s4-2\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E4.2.\u003C\u002Fspan\u003E Remote collaboration via shared control\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Physical interactions between humans and robots are exploited to perform common or independent tasks. 
When the two parts work together to achieve a common goal, the robotic system may integrate some degree of autonomy aimed to help the human in executing the task, ensuring better performance, safety, and ergonomics. We refer to these as shared control or shared autonomy scenarios, with the latter considered as the case in which the autonomy level is possibly varying [\u003Ca class=\"xref bibr\" href=\"#ref71\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Cognetti, Nikolaidis, Ivaldi and Siciliano\u003C\u002Fspan\u003E71\u003C\u002Fa\u003E]. Broadly speaking there is the spectrum of possible interactions between humans and robots, from robots having full autonomy to none at all [\u003Ca class=\"xref bibr\" href=\"#ref75\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Goodrich and Schultz\u003C\u002Fspan\u003E75\u003C\u002Fa\u003E]. As full autonomy still poses a problem for robotic systems when dealing with unknown or complex tasks in unstructured and uncertain scenarios [\u003Ca class=\"xref bibr\" href=\"#ref76\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Yang, Cambias, Cleary, Daimler, Drake, Dupont, Hata, Kazanzides, Martel, Patel, Santos and Taylor\u003C\u002Fspan\u003E76\u003C\u002Fa\u003E], shared control comes useful to improve the task performance while not increasing the human operator workload [\u003Ca class=\"xref bibr\" href=\"#ref77\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Kanda and Ishiguro\u003C\u002Fspan\u003E77\u003C\u002Fa\u003E]. Research about shared control focuses on the extent of human intervention in the control of artificial systems, splitting the workload between the two [\u003Ca class=\"xref bibr\" href=\"#ref78\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Schilling, Burgard, Muelling, Wrede and Ritter\u003C\u002Fspan\u003E78\u003C\u002Fa\u003E]. 
The extent of human intervention, and thus robot autonomy, has been usually classified into discrete levels [\u003Ca class=\"xref bibr\" href=\"#ref79\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bruemmer, Dudenhoeffer and Marble\u003C\u002Fspan\u003E79\u003C\u002Fa\u003E–\u003Ca class=\"xref bibr\" href=\"#ref81\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Kortenkamp, Keirn-Schreckenghost and Bonasso\u003C\u002Fspan\u003E81\u003C\u002Fa\u003E], with fewer studies considering a continuous domain [\u003Ca class=\"xref bibr\" href=\"#ref82\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Anderson, Peters, Iagnemma and Overholt\u003C\u002Fspan\u003E82\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref83\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Desai and Yanco\u003C\u002Fspan\u003E83\u003C\u002Fa\u003E]. Commonly, shared control techniques aim to fully or partially replace a function, such as identifying objects in cluttered environments [\u003Ca class=\"xref bibr\" href=\"#ref84\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Pitzer, Styer, Bersch, DuHadway and Becker\u003C\u002Fspan\u003E84\u003C\u002Fa\u003E], while others start from a fully autonomous robot and give control to the user only in difficult situations [\u003Ca class=\"xref bibr\" href=\"#ref80\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Dias, Kannan, Browning, Jones, Argall, Dias, Zinck, Veloso and Stentz\u003C\u002Fspan\u003E80\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref81\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Kortenkamp, Keirn-Schreckenghost and Bonasso\u003C\u002Fspan\u003E81\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref85\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Sellner, Simmons and Singh\u003C\u002Fspan\u003E85\u003C\u002Fa\u003E]. 
Some studies assist the operator by predicting their intent while selecting among different targets [\u003Ca class=\"xref bibr\" href=\"#ref86\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Dragan and Srinivasa\u003C\u002Fspan\u003E86\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref87\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Javdani, Srinivasa and Bagnell\u003C\u002Fspan\u003E87\u003C\u002Fa\u003E], while others exploit haptic feedback\u002Fguidance techniques while moving toward a specific target [\u003Ca class=\"xref bibr\" href=\"#ref88\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Aarno, Ekvall and Kragic\u003C\u002Fspan\u003E88\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref89\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Crandall and Goodrich\u003C\u002Fspan\u003E89\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f6\" id=\"f6\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig6.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"2881\" height=\"1260\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig6.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 6.\u003C\u002Fspan\u003E A shared control telerobotic system consists of a local device used to jointly send partial commands and receive computed haptic information as feedback from the remote side. 
The user usually observes the remote environment by means of a camera that provides a limited awareness of the scene. In (a), the robot must execute a remote object grasping task [\u003Ca class=\"xref bibr\" href=\"#ref91\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Giordano, Ficuciello and Siciliano\u003C\u002Fspan\u003E91\u003C\u002Fa\u003E]. In this case, provided haptic information aims to increase the situational awareness of the operator informing about the proximity to the robot’s joint limits and singularities. In (b) and (c), vision-based or programmed virtual fixtures aid the execution of the task in industrial and surgical robotic settings, respectively [\u003Ca class=\"xref bibr\" href=\"#ref92\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Fontanelli, Ficuciello, Villani and Siciliano\u003C\u002Fspan\u003E92\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref93\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Notomista, Chen, Gao, Trapani and Caldwell\u003C\u002Fspan\u003E93\u003C\u002Fa\u003E]. In (d), a non-prehensile object transportation scenario is considered and haptic feedback is provided about the proximity to the sliding conditions of the object placed on the tray [\u003Ca class=\"xref bibr\" href=\"#ref19\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano\u003C\u002Fspan\u003E19\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003Cp class=\"p\"\u003E Shared control\u002Fautonomy may take several forms and make use of a wide spectrum of methodologies depending on the application scenario. 
For example, when a human has to perform a complex manipulation task in a remote area by means of a dual-arm system, shared control methods may be designed to reduce the number of degrees of freedom controlled by the user while ensuring the task’s feasibility [\u003Ca class=\"xref bibr\" href=\"#ref90\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Abi-Farraj, Pacchierotti, Giordano and Siciliano\u003C\u002Fspan\u003E90\u003C\u002Fa\u003E]. In this way, the task execution becomes inherently less demanding both physically and cognitively. With the same aim, the autonomy and the human may be in charge of tasks having different priorities. In these cases, the tasks are usually organized hierarchically in a stack. Also in this case, controlling only one task, involving a minimum number of degrees of freedom, the human control of the robotic system becomes less fatigued [\u003Ca class=\"xref bibr\" href=\"#ref91\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Giordano, Ficuciello and Siciliano\u003C\u002Fspan\u003E91\u003C\u002Fa\u003E]. In remote applications, the user’s perception and awareness of the environment are usually hindered by the limited field of view provided by the remotely installed vision sensors (see Fig. \u003Ca class=\"xref fig\" href=\"#f6\"\u003E6\u003C\u002Fa\u003E (a)). For this reason, it is beneficial to exploit additional communication channels (besides the visual one) to convey information about the state of the remote system\u002Fenvironment.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Haptic guidance is usually employed in this case to increase the awareness of the robotic system state by displaying computed forces through a haptic device, which is also used to send commands to the robotic system. 
Haptic guidance may inform the user about the proximity to the system’s constraints (e.g., joint limits, singularities, collisions, etc.), suggesting motion directions that are free from constraints and safe for the task execution [\u003Ca class=\"xref bibr\" href=\"#ref90\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Abi-Farraj, Pacchierotti, Giordano and Siciliano\u003C\u002Fspan\u003E90\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref91\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Giordano, Ficuciello and Siciliano\u003C\u002Fspan\u003E91\u003C\u002Fa\u003E]. This may also be used to direct the user towards grasping poses that avoid constraints during post-grasping task trajectories [\u003Ca class=\"xref bibr\" href=\"#ref94\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, A.Ghalamzan, Moccia, Ficuciello and Siciliano\u003C\u002Fspan\u003E94\u003C\u002Fa\u003E]. In addition to this, haptic guidance in the form of virtual fixtures may be employed when the application requires following paths with high precision, such as in hazardous industrial scenarios [\u003Ca class=\"xref bibr\" href=\"#ref93\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Notomista, Chen, Gao, Trapani and Caldwell\u003C\u002Fspan\u003E93\u003C\u002Fa\u003E] (see Fig. \u003Ca class=\"xref fig\" href=\"#f6\"\u003E6\u003C\u002Fa\u003E (b)) or in surgical dissection scenarios [\u003Ca class=\"xref bibr\" href=\"#ref92\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Fontanelli, Ficuciello, Villani and Siciliano\u003C\u002Fspan\u003E92\u003C\u002Fa\u003E] (see Fig. \u003Ca class=\"xref fig\" href=\"#f6\"\u003E6\u003C\u002Fa\u003E (c)). 
More recently, we have developed shared control methods for a remote robotic system performing a dynamic non-prehensile object transportation task, where haptic guidance was used to inform the user about proximity to the sliding condition [\u003Ca class=\"xref bibr\" href=\"#ref19\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Cacace, Pacchierotti, Ruggiero and Giordano\u003C\u002Fspan\u003E19\u003C\u002Fa\u003E] (see Fig. \u003Ca class=\"xref fig\" href=\"#f6\"\u003E6\u003C\u002Fa\u003E (d)).\u003C\u002Fp\u003E\u003Cdiv class=\"table-wrap\" data-magellan-destination=\"tblIV\" id=\"tblIV\"\u003E\n\n\u003Cdiv class=\"caption\"\u003E\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003ETable IV.\u003C\u002Fspan\u003E Summary of PRISMA Lab contributions in the field of AI and cognitive robotics.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cspan\u003E\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"676\" height=\"516\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fdiv\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s5\" id=\"s5\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E5.\u003C\u002Fspan\u003E AI and cognitive robotics\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E In order for a robot to autonomously or cooperatively perform complex tasks in the real world its control system should be endowed with cognitive capabilities 
enabling deliberation, execution, learning, and perception in dynamic, interactive, and unstructured environments [\u003Ca class=\"xref bibr\" href=\"#ref95\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Rodriguez-Guerra, Sorrosal, Cabanes and Calleja\u003C\u002Fspan\u003E95\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref96\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Schultheis and Cooper\u003C\u002Fspan\u003E96\u003C\u002Fa\u003E]. Cognitive robotics [\u003Ca class=\"xref bibr\" href=\"#ref97\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Beetz, Beßler, Haidu, Pomarlan, Bozcuoğlu and Bartels\u003C\u002Fspan\u003E97\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref98\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lemaignan, Warnier, Sisbot, Clodic and Alami\u003C\u002Fspan\u003E98\u003C\u002Fa\u003E] is concerned with these issues proposing architectures and methods for seamlessly integrating sensorimotor, cognitive, and interaction abilities in autonomous\u002Finteractive robots. Exploring these topics involves various research areas across AI and robotics. Flexible orchestration, execution, and monitoring of structured tasks is a particularly relevant aspect of robotics [\u003Ca class=\"xref bibr\" href=\"#ref99\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Beßler, Porzel, Pomarlan, Beetz, Malaka and Bateman\u003C\u002Fspan\u003E99\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref100\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference de la Cruz, Piater and Saveriano\u003C\u002Fspan\u003E100\u003C\u002Fa\u003E]. 
Current AI and robotics literature mostly relies on integrated planning and execution frameworks to address adaptive execution of complex activities [\u003Ca class=\"xref bibr\" href=\"#ref101\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Carbone, Finzi, Orlandini and Pirri\u003C\u002Fspan\u003E101\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref102\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Karpas, Levine, Yu and Williams\u003C\u002Fspan\u003E102\u003C\u002Fa\u003E]. On the other hand, cognitive control models and methods [\u003Ca class=\"xref bibr\" href=\"#ref103\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Botvinick, Braver, Barch, Carter and Cohen\u003C\u002Fspan\u003E103\u003C\u002Fa\u003E–\u003Ca class=\"xref bibr\" href=\"#ref105\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cooper and Shallice\u003C\u002Fspan\u003E105\u003C\u002Fa\u003E] can be deployed to improve robot autonomy as well as HRI performance. In this direction, we are currently investigating these methods to develop a cognitive control framework suitable for human-robot collaboration. Another relevant issue we are concerned with is the combination of symbolic and sub-symbolic approaches to incremental task learning [\u003Ca class=\"xref bibr\" href=\"#ref106\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Petrík, Tapaswi, Laptev and Sivic\u003C\u002Fspan\u003E106\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref107\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ramirez-Amaro, Yang and Cheng\u003C\u002Fspan\u003E107\u003C\u002Fa\u003E] and task and motion planning [\u003Ca class=\"xref bibr\" href=\"#ref108\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Mansouri, Pecora and Schüller\u003C\u002Fspan\u003E108\u003C\u002Fa\u003E]. In Table \u003Ca class=\"xref table\" href=\"#tblIV\"\u003EIV\u003C\u002Fa\u003E, we provide an overview of recent research activities related to these aspects. 
These works and results are further described and discussed in the following sections and categorized in Table \u003Ca class=\"xref table\" href=\"#tblIV\"\u003EIV\u003C\u002Fa\u003E.\u003C\u002Fp\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s5-1\" id=\"s5-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E5.1.\u003C\u002Fspan\u003E Flexible and collaborative execution of multiple tasks\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E An autonomous and collaborative robotic system is expected to flexibly execute multiple structured tasks while adeptly handling unexpected events and behaviors. In cognitive psychology and neuroscience, the executive mechanisms needed to support flexible, adaptive responses, and complex goal-directed cognitive processes are associated with the concept of cognitive control [\u003Ca class=\"xref bibr\" href=\"#ref103\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Botvinick, Braver, Barch, Carter and Cohen\u003C\u002Fspan\u003E103\u003C\u002Fa\u003E]. Despite their relevance in cognitive science, cognitive control models have seldom been integrated into robotic systems. In this regard, we aim at combining classic AI and machine learning methods with cognitive control mechanisms to support flexible and situated adaptive orchestration of robotic activities as well as task planning and learning. In particular, we rely on a supervisory attentional system (SAS) [\u003Ca class=\"xref bibr\" href=\"#ref105\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cooper and Shallice\u003C\u002Fspan\u003E105\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref122\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Norman and Shallice\u003C\u002Fspan\u003E122\u003C\u002Fa\u003E] to orchestrate the execution of hierarchically organized robotic behaviors. 
This paradigm seems particularly effective for both flexible plan execution and human-robot collaboration, in that it provides attention mechanisms considered as pivotal not only for task switching and regulation but also for human-human communication. Following this approach, we are currently developing a robotic cognitive control framework, based on the SAS paradigm, enabling multiple task orchestration execution, collaborative execution of structured tasks, and incremental task learning [\u003Ca class=\"xref bibr\" href=\"#ref114\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E114\u003C\u002Fa\u003E]. In this direction, we proposed and developed a practical attention-based executive framework (see (a) in Fig. \u003Ca class=\"xref fig\" href=\"#f5\"\u003E5\u003C\u002Fa\u003E), suitable for real-world collaborative robotic systems, which is also compatible with AI methods for planning, execution, learning, and HRI\u002Fcommunication. We show that the proposed framework supports flexible orchestration of multiple concurrent tasks hierarchically organized [\u003Ca class=\"xref bibr\" href=\"#ref111\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E111\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref112\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E112\u003C\u002Fa\u003E] and natural human-robot collaborative execution of structured activities [\u003Ca class=\"xref bibr\" href=\"#ref114\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E114\u003C\u002Fa\u003E], in that it allows fast and adaptive responses to unexpected events while reducing replanning [\u003Ca class=\"xref bibr\" href=\"#ref110\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Cacace, Fiore, Alami and Finzi\u003C\u002Fspan\u003E110\u003C\u002Fa\u003E] and supporting task-situated 
interpretation of the human interventions [\u003Ca class=\"xref bibr\" href=\"#ref74\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cacace, Caccavale, Finzi and Lippiello\u003C\u002Fspan\u003E74\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref115\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Leone, Lucignano, Rossi, Staffa and Finzi\u003C\u002Fspan\u003E115\u003C\u002Fa\u003E] (e.g., human pointing gestures as in (b) Fig. \u003Ca class=\"xref fig\" href=\"#f5\"\u003E5\u003C\u002Fa\u003E). Attentional mechanisms are also effective in improving users’ situation awareness and interpretation of robot behaviors by regulating or adjusting human-robot communication depending on the executive context [\u003Ca class=\"xref bibr\" href=\"#ref109\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Cacace, Caccavale, Finzi and Lippiello\u003C\u002Fspan\u003E109\u003C\u002Fa\u003E] or to support explainability during human-robot collaboration [\u003Ca class=\"xref bibr\" href=\"#ref113\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E113\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s5-2\" id=\"s5-2\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E5.2.\u003C\u002Fspan\u003E Task learning and teaching\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Attention-based task supervision and execution provide natural and effective support to task teaching and learning from demonstrations [\u003Ca class=\"xref bibr\" href=\"#ref114\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E114\u003C\u002Fa\u003E]. 
In [\u003Ca class=\"xref bibr\" href=\"#ref117\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Saveriano, Finzi and Lee\u003C\u002Fspan\u003E117\u003C\u002Fa\u003E], we proposed a framework enabling kinesthetic teaching of hierarchical tasks starting from abstract\u002Fincomplete descriptions: the human physical demonstration (as in (c) Fig. \u003Ca class=\"xref fig\" href=\"#f5\"\u003E5\u003C\u002Fa\u003E) is segmented into low-level controllers while a supervisory attentional system associates the generated segments to the abstract task structure, providing it with concrete\u002Fexecutable primitives. In this context, attentional manipulation (object or verbal cueing) can be exploited by the human to facilitate the matching between (top-down) proposed tasks\u002Fsubtasks and (bottom-up) generated segments\u002Fmodels. Such an approach was also extended to the imitation learning of dual-arm structured robotic tasks [\u003Ca class=\"xref bibr\" href=\"#ref118\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Saveriano, Fontanelli, Ficuciello, Lee and Finzi\u003C\u002Fspan\u003E118\u003C\u002Fa\u003E]. Attentional top-down and bottom-up regulations can also be learned from the demonstration. In [\u003Ca class=\"xref bibr\" href=\"#ref116\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E116\u003C\u002Fa\u003E], robotic task structures are associated with a multi-layered feed-forward neural network whose nodes\u002Fedges represent actions\u002Frelations to be executed in so combining neural-based learning and symbolic activities. Multi-robot task learning issues were also explored. 
In [\u003Ca class=\"xref bibr\" href=\"#ref119\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Ermini, Fedeli, Finzi, Lippiello and Tavano\u003C\u002Fspan\u003E119\u003C\u002Fa\u003E], a reinforcement deep Q-learning approach was proposed to guide a group of sanitizing robots in cleaning railway stations with dynamic priorities. This approach was also extended to prioritized cleaning with heterogeneous teams of robots [\u003Ca class=\"xref bibr\" href=\"#ref120\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Ermini, Fedeli, Finzi, Lippiello and Tavano\u003C\u002Fspan\u003E120\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s5-3\" id=\"s5-3\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E5.3.\u003C\u002Fspan\u003E Combined task and motion planning\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Task and motion planning in robotics are typically handled by separate methods, with high-level task planners generating abstract actions and motion planners specifying concrete motions. These two planning processes are, however, strictly interdependent, and various approaches have been proposed in the literature to efficiently generate combined plans [\u003Ca class=\"xref bibr\" href=\"#ref108\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Mansouri, Pecora and Schüller\u003C\u002Fspan\u003E108\u003C\u002Fa\u003E]. Recently, we started to investigate how sampling-based methods such as Rapidly Exploring Random Trees (RRTs), commonly employed for motion planning, can be leveraged to generate task and motion plans within a metric space where both symbolic (task) and sub-symbolic (motion) spaces are represented [\u003Ca class=\"xref bibr\" href=\"#ref121\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale and Finzi\u003C\u002Fspan\u003E121\u003C\u002Fa\u003E]. 
The notion of distance defined in this extended metric space is then exploited to guide the expansion of the RRT to generate plans including both symbolic actions and feasible movements in the configuration space (see (d) in Fig. \u003Ca class=\"xref fig\" href=\"#f5\"\u003E5\u003C\u002Fa\u003E). Empirical results collected in mobile robotics case studies suggest that the approach is feasible in realistic scenarios, while its effectiveness is more emphasized in complex and cluttered environments.\u003C\u002Fp\u003E\u003Cdiv class=\"table-wrap\" data-magellan-destination=\"tblV\" id=\"tblV\"\u003E\n\n\u003Cdiv class=\"caption\"\u003E\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003ETable V.\u003C\u002Fspan\u003E Summary of PRISMA Lab contributions in the field of industrial robotics.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cspan\u003E\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"677\" height=\"269\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fdiv\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s6\" id=\"s6\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E6.\u003C\u002Fspan\u003E Industrial robotics\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E In industry, logistics aims at optimizing the flow of goods inside the large-scale distribution. 
The task of unloading carton cases from a pallet, usually referred to as depalletizing, yields several technological challenges [\u003Ca class=\"xref bibr\" href=\"#ref123\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Echelmeyer, Kirchheim and Wellbrock\u003C\u002Fspan\u003E123\u003C\u002Fa\u003E] due to the heterogeneous nature of the cases that can present different dimensions, shapes, weights, and textures. This is the case in supermarkets where the products are stored on mixed pallets, which are pallets made of heterogeneous cases. On the other side, the literature review is mainly focused on the easier task of depalletizing homogeneous pallets, which are pallets made of standardized and equal cases. For instance, AI-enabled depalletizing systems were proposed to address problems of motion planning [\u003Ca class=\"xref bibr\" href=\"#ref124\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Sakamoto, Harada and Wan\u003C\u002Fspan\u003E124\u003C\u002Fa\u003E] and safety [\u003Ca class=\"xref bibr\" href=\"#ref125\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Jocas, Kurrek, Zoghlami, Gianni and Salehi\u003C\u002Fspan\u003E125\u003C\u002Fa\u003E]. In [\u003Ca class=\"xref bibr\" href=\"#ref126\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Nakamoto, Eto, Sonoura, Tanaka and Ogawa\u003C\u002Fspan\u003E126\u003C\u002Fa\u003E], the use of target plane extraction from depth images and package border detection via brightness images to recognize various packages stacked complicatedly was proposed. A similar perception system can be found also in [\u003Ca class=\"xref bibr\" href=\"#ref127\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Schwarz, Milan, Periyasamy and Behnke\u003C\u002Fspan\u003E127\u003C\u002Fa\u003E], where a deep-learning approach that combines object detection and semantic segmentation was applied to pick bins in cluttered warehouse scenarios. 
In this case, a specific data-reduction method was deployed to reduce the dimension of the dataset but several images of objects are still needed, impairing its usage by non-expert operators. Moreover, in [\u003Ca class=\"xref bibr\" href=\"#ref128\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Katsoulas and Kosmopoulos\u003C\u002Fspan\u003E128\u003C\u002Fa\u003E] a system comprising an industrial robot and time-of-flight laser sensors was used to perform the depalletizing task. Some examples of specific gripping solutions developed to address both depalletizing and palletizing tasks (the task of loading cases to assemble a pallet) in highly structured industrial environments include: the robotic manipulator proposed in [\u003Ca class=\"xref bibr\" href=\"#ref129\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Krug, Stoyanov, Tincani, Andreasson, Mosberger, Fantoni and Lilienthal\u003C\u002Fspan\u003E129\u003C\u002Fa\u003E], the suction systems applied on an autonomous robot capable of picking standard boxes from the upper side and placing them on a conveyance line proposed in [\u003Ca class=\"xref bibr\" href=\"#ref126\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Nakamoto, Eto, Sonoura, Tanaka and Ogawa\u003C\u002Fspan\u003E126\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref130\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Tanaka, Ogawa, Nakamoto, Sonoura and Eto\u003C\u002Fspan\u003E130\u003C\u002Fa\u003E], as well as the flexible robotic palletizer presented in [\u003Ca class=\"xref bibr\" href=\"#ref131\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Moura and Silva\u003C\u002Fspan\u003E131\u003C\u002Fa\u003E]. 
Table \u003Ca class=\"xref table\" href=\"#tblV\"\u003EV\u003C\u002Fa\u003E provides an overview of the work done in this field.\u003C\u002Fp\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s6-1\" id=\"s6-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E6.1.\u003C\u002Fspan\u003E Logistics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E A common activity in logistics is to depalletize goods from shipping pallets. This task, which is hard and uncomfortable for human operators, is often performed by robotic depalletizing systems. These automated solutions are very effective in well-structured environments, however, there are more complex situations, such as depalletizing of mixed pallets in supermarkets, which still represent a challenge for robotic systems. In recent years, we studied the problem of depalletizing mixed and randomly organized pallets by proposing a robotic depalletizing system [\u003Ca class=\"xref bibr\" href=\"#ref132\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Arpenti, Paduano, Fontanellli, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E132\u003C\u002Fa\u003E] integrating attentional mechanisms from Sec. \u003Ca class=\"xref sec\" href=\"#s5\"\u003E5\u003C\u002Fa\u003E to flexibly schedule, monitor, and adapt the depalletizing process considering online perceptual information from non-invasive sensors as well as high-level constraints that can be provided by supervising users or management systems.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Such flexible depalletizing processes also require strong perceptive capabilities. 
To this end, in [\u003Ca class=\"xref bibr\" href=\"#ref133\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Caccavale, Paduano, Fontanelli, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E133\u003C\u002Fa\u003E] a single-camera system was proposed, where RGB-D data were used for the detection, recognition, and localization of heterogeneous cases, both textured and untextured, in a mixed pallet. Specifically, a priori information about the content of the pallet (the product barcode, the number of instances of a given product case in the pallet, the dimensions of the cases, and the images of the textured cases) was combined with data from the RGB-D camera, exploiting a pipeline of 2D and 3D model-based computer vision algorithms, as shown in Fig. \u003Ca class=\"xref fig\" href=\"#f7\"\u003E7\u003C\u002Fa\u003E, left. The integration of such a system into logistic chains was simplified by the short dataset required, based only on the images of the cases in the current pallet, and on a single image from a single RGB-D sensor.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E In addition to cognitive and perceptual capabilities, depalletizing robotic systems also require a high degree of dexterity to effectively grasp mixed cases with complex shapes. 
In [\u003Ca class=\"xref bibr\" href=\"#ref134\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Fontanelli, Paduano, Caccavale, Arpenti, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E134\u003C\u002Fa\u003E], we proposed a sensorized gripper, designed to be assembled on the end-tip of an industrial robotic arm, that allowed grasping of cases either from above or from the lateral sides and was capable of adapting its shape online to different sizes of products.\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f7\" id=\"f7\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig7.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"2833\" height=\"1203\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig7.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 7.\u003C\u002Fspan\u003E Overall picture of the logistic scenario including an abstract representation of vision-based recognition and localization algorithm (left), snapshot of the robotic depalletizing cell (right) with highlighted detail of the gripping tool (red window).\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s7\" id=\"s7\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E7.\u003C\u002Fspan\u003E Medical robotics\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E 
Medical robotics is a fast-growing field that integrates the principles of robotics with healthcare to advance medical procedures and enhance patient outcomes. Its primary objective is to develop cutting-edge robotic systems, devices, and technologies that cater to a wide range of medical domains, including surgery, rehabilitation, diagnosis, and patient care. In the realm of medical robotics, surgical robotics stands out as a specialized field dedicated to the development and application of robotic systems in surgical procedures. In this context, prioritizing safety is crucial, especially in robotic systems categorized as critical, where it serves as a fundamental design focus. In the quest for heightened safety and decreased cognitive burden, the shared control paradigm has played a crucial role, notably with the integration of active constraints. This methodology has given rise to specialized applications like Virtual Fixtures (VFs), which have garnered increasing popularity in recent years [\u003Ca class=\"xref bibr\" href=\"#ref135\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bowyer, Davies and Baena\u003C\u002Fspan\u003E135\u003C\u002Fa\u003E]. VFs act as virtual overlays, delivering guidance and support to surgeons during procedures and offering a diverse array of functionalities. When integrated with haptic feedback or guidance, the use of VFs in surgical teleoperated robots frequently offers active assistance to the surgeon through force rendering at the master side. As an example, Li et al. introduced an online collision avoidance method for the real-time interactive control of a surgical robot in complex environments, like the sinus cavities [\u003Ca class=\"xref bibr\" href=\"#ref136\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Li, Ishii and Taylor\u003C\u002Fspan\u003E136\u003C\u002Fa\u003E]. 
The push for autonomous tasks in surgery stems from a drive to enhance precision and efficiency while relieving surgeons of cognitive workload in minimally invasive procedures. The advancement of surgical robots frequently entails the creation of innovative control laws using constrained optimization techniques [\u003Ca class=\"xref bibr\" href=\"#ref137\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Marinho, Adorno, k. and Mitsuishi\u003C\u002Fspan\u003E137\u003C\u002Fa\u003E]. Ensuring the safety of robots in dynamic environments, particularly in robotics, has been significantly aided by the emergence of the Control Barrier Functions (CBFs) framework, as highlighted in [\u003Ca class=\"xref bibr\" href=\"#ref138\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ames, Coogan, Egerstedt, Notomista, Sreenath and Tabuada\u003C\u002Fspan\u003E138\u003C\u002Fa\u003E]. Advances in surgical robotics research extend beyond software applications, encompassing the innovation of hardware devices designed to streamline surgeons’ tasks and elevate their performance capabilities. A motorized hand offers an ergonomic alternative, and researched sensor designs prioritize force sensation for advantages in robotic surgery, such as injury reduction and palpation empowerment [\u003Ca class=\"xref bibr\" href=\"#ref139\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Kim, Kim, Seok, So and Choi\u003C\u002Fspan\u003E139\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref140\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Lee, Kim, Gulrez, Yoon, Hannaford and Choi\u003C\u002Fspan\u003E140\u003C\u002Fa\u003E]. In addition to surgical applications, medical robotic research has also advanced the development of sophisticated devices for artificial limbs. 
Drawing inspiration from the human hand, robotic hands have incorporated compliance and sensors through various technological solutions to enhance robustness by absorbing external impact and improve capabilities in object grasping and manipulation [\u003Ca class=\"xref bibr\" href=\"#ref141\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Catalano, Grioli, Farnioli, Serio, Piazza and Bicchi\u003C\u002Fspan\u003E141\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref142\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Piazza, Catalano, Godfrey, Rossi, Grioli, Bianchi, Zhao and Bicchi\u003C\u002Fspan\u003E142\u003C\u002Fa\u003E]. Table \u003Ca class=\"xref table\" href=\"#tblVI\"\u003EVI\u003C\u002Fa\u003E provides a classification of the recent contributions to the field. \u003C\u002Fp\u003E\u003Cdiv class=\"table-wrap\" data-magellan-destination=\"tblVI\" id=\"tblVI\"\u003E\n\n\u003Cdiv class=\"caption\"\u003E\n\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003ETable VI.\u003C\u002Fspan\u003E Summary of PRISMA Lab contributions in the field of medical robotics.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cspan\u003E\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"678\" height=\"533\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003C\u002Fspan\u003E\n\u003C\u002Fdiv\u003E\n\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s7-1\" id=\"s7-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E7.1.\u003C\u002Fspan\u003E 
Surgical robotics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Surgical robotics transformed surgery, progressing from open to minimally invasive and robot-assisted procedures. While open surgery involves large incisions and minimally invasive surgery uses small incisions, robot-assisted surgery utilizes robotic systems to enhance patient outcomes by reducing trauma, recovery times, and risks. However, there are ongoing constraints in accuracy, speed, dexterity, flexibility, and specialized skills. Research and development efforts are dedicated to overcoming these limitations and expanding the applications of robotic systems. Safety in surgical procedures is paramount, and advanced control systems with active constraints like VFs enhance safety and reduce cognitive load. VFs provide virtual guidance and assistance to surgeons through simulated barriers (Forbidden Regions Virtual Fixtures – FRVFs) and attractive forces (Guidance Virtual Fixtures – GVFs), improving surgical outcomes. A novel approach was employed for the precise dissection of polyps in surgical procedures, ensuring accurate detection of the region of interest and high-precision cutting with safety margins [\u003Ca class=\"xref bibr\" href=\"#ref143\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Moccia, Selvaggio, Villani, Siciliano and Ficuciello\u003C\u002Fspan\u003E143\u003C\u002Fa\u003E]. The method utilized a control approach based on GVFs to constrain the robot’s motion along the dissection path. VFs were created using computer vision techniques, extracting control points from surgical scene images and dynamically updating them to adapt to environmental changes. The effectiveness of the approach was validated through experiments on the da Vinci Research Kit (dVRK) robot, an open-source platform based on the famous da Vinci\u003Csup class=\"sup\"\u003E®\u003C\u002Fsup\u003E Surgical System. 
In the context of enhancing the suturing process with the dVRK robot, a similar approach was introduced, leveraging vision-based tracking techniques for precise needle tracking [\u003Ca class=\"xref bibr\" href=\"#ref94\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, A.Ghalamzan, Moccia, Ficuciello and Siciliano\u003C\u002Fspan\u003E94\u003C\u002Fa\u003E]. The system was applied in conjunction with the haptic VF control technique using dVRK, mitigating the risk of joint limits and singularities during suturing. The optimal grasp pose was utilized to calculate force cues that guided the user’s hand through the Master Tool Manipulator. The paper in [\u003Ca class=\"xref bibr\" href=\"#ref144\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Moccia, Iacono, Siciliano and Ficuciello\u003C\u002Fspan\u003E144\u003C\u002Fa\u003E] presented an example of FRVF application in the form of a surgical tools collision avoidance method. FRVFs were utilized to prevent tool collisions by generating a repulsive force for the surgeon. A marker-less tool tracking method employing a deep neural network architecture for tool segmentation was adopted (see Fig. \u003Ca class=\"xref fig\" href=\"#f8\"\u003E8\u003C\u002Fa\u003E). This work proposed the use of an Extended Kalman Filter for pose estimation to enhance the robustness of VF application on the tool by incorporating both vision and kinematics information. Software applications are moving also toward increasing the autonomy in surgical robotics. For instance, the paper in [\u003Ca class=\"xref bibr\" href=\"#ref148\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Moccia and Ficuciello\u003C\u002Fspan\u003E148\u003C\u002Fa\u003E] presented an autonomous endoscope control algorithm for the dVRK’s Endoscopic Camera Manipulator in surgical robotics. It employed Image-based Visual Servoing (IBVS) with additional constraints enforced by CBFs to ensure instrument visibility and prevent joint limit violations. 
Laparoscopic images were used, and deep learning was applied for semantic segmentation. The algorithm configured an IBVS controller and solved a convex optimization problem to satisfy the constraints. The solutions mentioned earlier were tested in a simulated environment using the CoppeliaSim software, with a particular focus on the presentation of the dVRK simulator [\u003Ca class=\"xref bibr\" href=\"#ref149\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ferro, Brunori, Magistri, Saiella, Selvaggio and Fontanelli\u003C\u002Fspan\u003E149\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref150\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Fontanelli, Selvaggio, Ferro, Ficuciello, Vendittelli and Siciliano\u003C\u002Fspan\u003E150\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Research advancements in surgical robotics encompass not only software applications but also the development of hardware devices that aim to facilitate surgeons’ jobs and enhance their performance. The MUSHA Hand II, a multifunctional surgical instrument with underactuated soft fingers ( [\u003Ca class=\"xref bibr\" href=\"#ref151\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ghafoor, Dai and Duffy\u003C\u002Fspan\u003E151\u003C\u002Fa\u003E]) and force sensors, was integrated into the da Vinci\u003Csup class=\"sup\"\u003E®\u003C\u002Fsup\u003E robotic platform [\u003Ca class=\"xref bibr\" href=\"#ref145\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Liu, Selvaggio, Ferrentino, Moccia, Pirozzi, Bracale and Ficuciello\u003C\u002Fspan\u003E145\u003C\u002Fa\u003E–\u003Ca class=\"xref bibr\" href=\"#ref147\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Fontanelli, Marrazzo, Bracale, Irace, Breglio, Villani, Siciliano and Ficuciello\u003C\u002Fspan\u003E147\u003C\u002Fa\u003E], shown in Fig. \u003Ca class=\"xref fig\" href=\"#f8\"\u003E8\u003C\u002Fa\u003E. 
This innovative hand enhances the adaptability and functionality of the surgical system, addressing limitations in force sensing during robot-assisted surgery. Experimental validation was performed on the dVRK robotic testbed. The paper in refs. [\u003Ca class=\"xref bibr\" href=\"#ref23\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Fontanelli, Selvaggio, Buonocore, Ficuciello, Villani and Siciliano\u003C\u002Fspan\u003E23\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref152\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Sallam, Fontanelli, Gallo, La Rocca, Di Spiezio Sardo, Longo and Ficuciello\u003C\u002Fspan\u003E152\u003C\u002Fa\u003E] introduces a novel single-handed needle driver tool inspired by human hand-rolling abilities. It includes a working prototype and is tested with the dVRK surgical system. Robotic solutions are also created to solve specific surgical procedures, like prostate cancer biopsy. The paper in [\u003Ca class=\"xref bibr\" href=\"#ref153\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Coevoet, Adagolodjo, Lin, Duriez and Ficuciello\u003C\u002Fspan\u003E153\u003C\u002Fa\u003E] presented a robotic solution for transrectal prostate biopsy, showcasing a soft-rigid robot manipulator with an integrated probe-needle assembly. The system included manual positioning of the probe and autonomous alignment of the needle, along with MRI-US fusion for improved visualization. 
Experimental validation was conducted using prostate phantoms.\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f8\" id=\"f8\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig8.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4251\" height=\"1436\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig8.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 8.\u003C\u002Fspan\u003E Left: a marker-less method tracks surgical tools, establishing VF geometry resembling a cylinder with its central axis aligned with the instrument’s axis [\u003Ca class=\"xref bibr\" href=\"#ref144\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Moccia, Iacono, Siciliano and Ficuciello\u003C\u002Fspan\u003E144\u003C\u002Fa\u003E]; right: the MUSHA Hand II surgical tool, integrated on the dVRK robot [\u003Ca class=\"xref bibr\" href=\"#ref145\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Liu, Selvaggio, Ferrentino, Moccia, Pirozzi, Bracale and Ficuciello\u003C\u002Fspan\u003E145\u003C\u002Fa\u003E–\u003Ca class=\"xref bibr\" href=\"#ref147\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Selvaggio, Fontanelli, Marrazzo, Bracale, Irace, Breglio, Villani, Siciliano and Ficuciello\u003C\u002Fspan\u003E147\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s7-2\" 
id=\"s7-2\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E7.2.\u003C\u002Fspan\u003E Robotic hands and prosthesis\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Robotic artificial limbs have played a crucial role in aiding individuals with missing body parts to regain functionality in their daily life activities. The PRISMA Hand II, depicted in Fig. \u003Ca class=\"xref fig\" href=\"#f9\"\u003E9\u003C\u002Fa\u003E, represented a mechanically robust anthropomorphic hand with high underactuation, utilizing three motors to drive 19 joints through elastic tendons. Its distinctive mechanical design facilitated adaptive grasping and in-hand manipulation, complemented by tactile\u002Fforce sensors embedded in each fingertip. Based on optoelectronic technology, these sensors provided valuable tactile\u002Fforce feedback during object manipulation, particularly for deformable objects. The paper in [\u003Ca class=\"xref bibr\" href=\"#ref154\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Canbay, Ferrentino, Liu, Moccia, Pirozzi, Siciliano and Ficuciello\u003C\u002Fspan\u003E154\u003C\u002Fa\u003E] detailed the hand’s mechanical design, sensor technology, and proposed a calibration procedure for the tactile\u002Fforce sensors. It included a comparison of various neural network architectures for sensor calibration, experimental tests to determine the optimal tactile sensing suite, and demonstrations of force regulation effectiveness using calibrated sensors. The paper also introduced a virtual simulator for users to undergo training sessions in controlling the prosthesis. 
Surface Electromyographic (sEMG) sensors captured muscle signals from the user, processed by a recognition algorithm to interpret the patient’s intentions [\u003Ca class=\"xref bibr\" href=\"#ref155\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Leccia, Sallam, Grazioso, Caporaso, Di Gironimo and Ficuciello\u003C\u002Fspan\u003E155\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\u003Csection\u003E\u003Cdiv class=\"fig\" data-magellan-destination=\"f9\" id=\"f9\"\u003E\n\n\n\u003Cdiv class=\"figure-thumb\"\u003E\u003Cimg src=\"data:image\u002Fgif;base64,R0lGODlhAQABAIAAAMLCwgAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==\" data-src=\"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig9.png?pub-status=live\" class=\"aop-lazy-load-image\" width=\"4249\" height=\"2159\" data-original-image=\"\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_fig9.png\" data-zoomable=\"true\"\u003E\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"caption\"\u003E\u003Cp class=\"p\"\u003E \n\u003C\u002Fp\u003E\u003Cp class=\"p\"\u003E\u003Cspan class=\"label\"\u003EFigure 9.\u003C\u002Fspan\u003E The PRISMA Hand II and its capabilities. 
The grasping options are categorized into three sets: (a) lateral grasps, (b) pinches, and (c) power grasps [\u003Ca class=\"xref bibr\" href=\"#ref154\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Canbay, Ferrentino, Liu, Moccia, Pirozzi, Siciliano and Ficuciello\u003C\u002Fspan\u003E154\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref155\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Leccia, Sallam, Grazioso, Caporaso, Di Gironimo and Ficuciello\u003C\u002Fspan\u003E155\u003C\u002Fa\u003E].\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\u003C\u002Fdiv\u003E\u003C\u002Fsection\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s8\" id=\"s8\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E8.\u003C\u002Fspan\u003E Future Directions\u003C\u002Fh2\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s8-1\" id=\"s8-1\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E8.1.\u003C\u002Fspan\u003E Dynamic manipulation and locomotion\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Manipulation and locomotion represent two research areas that require explicit or implicit control of the interaction forces and the enforcement of the related frictional constraints. Mastering in-contact situations through accurate force regulation will allow legged or service robots of the future to perform several difficult tasks with unprecedented precision and robustness [\u003Ca class=\"xref bibr\" href=\"#ref156\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Gong, Sun, Nair, Bidwai, R., Grezmak, Sartoretti and Daltorio\u003C\u002Fspan\u003E156\u003C\u002Fa\u003E]. These include dealing with time-varying or switching contacts with the environment and manipulating or locomoting on articulated, foldable, or even continuously deformable surfaces. 
In both fields, the synthesis of novel mechanisms is always a meaningful aspect [\u003Ca class=\"xref bibr\" href=\"#ref157\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Jia, Huang, Li, Wu, Cao and Guo\u003C\u002Fspan\u003E157\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref158\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Jia, Huang, Wang and Li\u003C\u002Fspan\u003E158\u003C\u002Fa\u003E]. Solving complex tasks requiring simultaneous locomotion and manipulation (commonly referred to as loco-manipulation) using, for example, quadruped robots equipped with an arm, is a very active topic of research. Future works should focus on optimizing the robustness of loco-manipulation trajectories against unknown external disturbances or develop control techniques for safe interaction with humans [\u003Ca class=\"xref bibr\" href=\"#ref159\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Bellicoso, Krämer, Stäuble, Sako, Jenelten, Bjelonic and Hutter\u003C\u002Fspan\u003E159\u003C\u002Fa\u003E, \u003Ca class=\"xref bibr\" href=\"#ref160\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Ferrolho, Ivan, Merkt, Havoutis and Vijayakumar\u003C\u002Fspan\u003E160\u003C\u002Fa\u003E]. This will raise the need for improving proprioceptive and exteroceptive perception techniques to accurately retrieve the actual state of the robot and the environment in contact. The combined use of multiple vision, force and tactile sensors, and fusion techniques constitute a promising approach in this direction [\u003Ca class=\"xref bibr\" href=\"#ref161\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Costanzo, Natale and Selvaggio\u003C\u002Fspan\u003E161\u003C\u002Fa\u003E]. Another future research direction includes the development of improved policy representation and learning or planning frameworks to handle difficult tasks. 
In other words, finding mappings from the task requirements and sensor feedback to controller inputs for in-contact tasks is still carried out with difficulties. The development of an accurate yet fast physics engine to simulate in-contact tasks with constrained environments will favor this and allow for better policy transfer to handle difficult tasks that can be learned in simulation before being deployed to the real world.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s8-2\" id=\"s8-2\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E8.2.\u003C\u002Fspan\u003E Aerial robotics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Energy saving, safety in the interactions with people and objects, accuracy, and reliable decisional autonomy pose significant limitations in aerial systems. Future challenges involve power consumption and short-lived batteries, while uncertified devices prompt safety restrictions. Several roadmaps emphasize the need for aerial devices to function in real-world scenarios, facing inclement weather and requiring proper certifications. Mechatronics is crucial for both UAMs. Despite progress, challenges persist in enhancing safety and energy efficiency. Integrating mechanical design and control is essential, with a lack of research on the optimal positioning of grasping tools for UAMs. Hybrid mechatronic solutions are potential avenues for improvement.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Opportunities come from inspection and maintenance tasks for aerial manipulators, such as replacing human operators in remote locations, handling hazardous tasks, and increasing plant inspections. Achieving these goals requires addressing outlined issues and improving environmental performance. 
While aerial manipulation activities are primarily in academia, recent European-funded projects like AIRobots, ARCAS, SHERPA, EuRoC, Aeroworks, AEROARMS, AERO-TRAIN, and AERIAL-CORE aim to bridge the gap between academia and industry. The AEROARMS project received the European Commission Innovation Radar Prize, showcasing advancements. However, the technology migration remains a challenging journey.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s8-3\" id=\"s8-3\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E8.3.\u003C\u002Fspan\u003E Physical human-robot interaction\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E In future works, the proposed HRI frameworks can be extended to integrate multiple interaction modalities other than physical. For instance, visual and audio feedback may provide additional information about the robot’s state to improve readability, safety, and reliability during the assisted modes. In addition, gesture-based and speech-based interaction modalities may complement physical interaction to enable a more natural human-robot communication, while enhancing the robustness of intention estimation.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s8-4\" id=\"s8-4\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E8.4.\u003C\u002Fspan\u003E AI and cognitive robotics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E In our ongoing research activities, we aim to develop an integrated robotic executive framework supporting long-term autonomy in complex operative scenarios. For this purpose, our goal is to investigate incremental task teaching and adaptation methods, progressing from primitive to complex robotic tasks. 
In this direction, symbolic and sub-symbolic learning methods can be integrated to simultaneously learn hierarchical tasks, sensorimotor processes, and attention regulations through human demonstrations and environmental interaction. In this setting, effective mechanisms are also needed to retrieve and reuse learned tasks depending on the operational and the environmental context. Concerning natural human-robot collaboration, we are currently investigating additional attention mechanisms (e.g., joint attention, active perception, affordances, etc.) that play a crucial role in supporting task teaching and adaptive execution. Regarding combined task and motion planning methods, our aim is to formulate more sophisticated metrics and to address hierarchically structured tasks of mobile manipulation.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s8-5\" id=\"s8-5\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E8.5.\u003C\u002Fspan\u003E Industrial robotics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E As a future research direction, the flexible and adaptive architecture for depalletizing tasks in supermarkets proposed in [\u003Ca class=\"xref bibr\" href=\"#ref132\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Caccavale, Arpenti, Paduano, Fontanellli, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E132\u003C\u002Fa\u003E] will be extended also to palletizing tasks or other industrial scenarios, such as packaging [\u003Ca class=\"xref bibr\" href=\"#ref162\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Dai and Caldwell\u003C\u002Fspan\u003E162\u003C\u002Fa\u003E]. Moreover, more complex environmental conditions along with more sophisticated task structures including safety constraints and fault detection\u002Fcorrection will be investigated. 
Regarding the vision side, the segmentation accuracy, as well as the depalletization speed of the algorithms deployed in the framework [\u003Ca class=\"xref bibr\" href=\"#ref133\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Arpenti, Caccavale, Paduano, Fontanelli, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E133\u003C\u002Fa\u003E] will be exhaustively compared with the performance of convolutional neural networks and support vector machines. In addition, multiple images from different perspectives will be exploited in a multi-camera approach to better estimate the poses of the cases. Regarding the gripping tool [\u003Ca class=\"xref bibr\" href=\"#ref134\"\u003E\u003Cspan class=\"show-for-sr\"\u003EReference Fontanelli, Paduano, Caccavale, Arpenti, Lippiello, Villani and Siciliano\u003C\u002Fspan\u003E134\u003C\u002Fa\u003E], more compact suction systems will be developed to find the best tradeoff between dimensions, weight, and effectiveness for each type of product.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec\" data-magellan-destination=\"s8-6\" id=\"s8-6\"\u003E\n\n\u003Ch3 class=\"B\"\u003E\u003Cspan class=\"label\"\u003E8.6.\u003C\u002Fspan\u003E Medical robotics\u003C\u002Fh3\u003E\n\u003Cp class=\"p\"\u003E Charting the course for the future of medical robotics, especially in the surgical domain, entails a pivotal shift towards the incorporation of cutting-edge AI techniques. This evolution seeks to broaden the applicability of proposed methodologies to embrace realistic surgical scenarios, effectively navigating challenges posed by tissue deformation and occlusions. Rigorous studies on medical procedures will be conducted to precisely define safety standards, ensuring a meticulous approach to healthcare practices. 
As a conclusive step, collaborative validation with surgeons will serve as a tangible testament to the effectiveness of the proposed pipelines, affirming their real-world impact in enhancing surgical precision and safety. In the realm of advancing robotic surgical instruments and artificial limbs, future trajectories point towards expanding the capabilities of proposed devices to cater to more specific scenarios. This evolution involves a strategic integration of tailored characteristics, incorporating cutting-edge sensing technologies and intelligent control strategies. Having demonstrated the potential applications of these devices, the ongoing endeavor is to refine their design for optimal performance across an array of surgical tasks. The ultimate objective lies in seamlessly transferring these innovations from the realm of development to practical clinical applications, ushering in a new era of enhanced surgical precision and functional prosthetic applications.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec conclusions\" data-magellan-destination=\"s9\" id=\"s9\"\u003E\n\n\u003Ch2 class=\"A\"\u003E\u003Cspan class=\"label\"\u003E9.\u003C\u002Fspan\u003E Conclusion\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E In this article, we overviewed the main results achieved by the robotics research carried out at the PRISMA Lab of the University of Naples Federico II during the last decade. After a brief overview, the key contributions to the six research areas of dynamic manipulation and locomotion, aerial robotics, physical HRI, AI and cognitive robotics, industrial robotics, and medical robotics were briefly reported and discussed together with future research directions. 
We highlighted the main achievements in each of these areas, categorizing the adopted methodologies and the key contributions in the fields.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E Our dream and goal for the future is to make scientific and technological research advancements in all the considered areas more accessible to other people around the world who may be able to use it for their purposes or needs. From this, significant breakthroughs are expected in the future for the industry, health, education, economic, and social sectors.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"back\"\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s10\" id=\"s10\"\u003E\n\u003Ch2 class=\"A\"\u003E Author contribution\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E Bruno Siciliano conceived the article. Mario Selvaggio, Rocco Moccia, Pierluigi Arpenti, Riccardo Caccavale, and Fabio Ruggiero wrote the manuscript under the supervision of the rest of the authors who reviewed and edited it.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec funding-statement\" data-magellan-destination=\"s11\" id=\"s11\"\u003E\n\u003Ch2 class=\"A\"\u003E Financial support\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E The research leading to these results has been partially supported by the following projects: COWBOT, grant 2020NH7EAZ_002, PRIN 2020; AI-DROW, grant 2022BYSBYX, PRIN 2022 PNRR, European Union – NextGenerationEU; Harmony, grant 101017008, European Union’s Horizon 2020; Inverse, grant 101136067, and euROBIN, grant 101070596, European Union’s Horizon Europe; BRIEF, IC IR0000036, National Recovery and Resilience Plan, Mission 4 Component 2 Investment 3.1 of Italian Ministry of University and Research funded by the European Union – NextGenerationEU.\u003C\u002Fp\u003E\n\u003Cp class=\"p\"\u003E The views and opinions expressed are only those of the authors and do not necessarily reflect those of the funding 
agencies.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec coi-statement\" data-magellan-destination=\"s12\" id=\"s12\"\u003E\n\u003Ch2 class=\"A\"\u003E Competing interests\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E The authors declare no competing interests exist.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\u003Cdiv class=\"sec other\" data-magellan-destination=\"s13\" id=\"s13\"\u003E\n\u003Ch2 class=\"A\"\u003E Ethical approval\u003C\u002Fh2\u003E\n\u003Cp class=\"p\"\u003E None.\u003C\u002Fp\u003E\n\u003C\u002Fdiv\u003E\n\n\u003C\u002Fdiv\u003E\n\u003C\u002Fdiv\u003E",tableOfContent:[{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EIntroduction\u003C\u002Fdiv\u003E",url:"s1"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EDynamic manipulation and locomotion\u003C\u002Fdiv\u003E",url:"s2"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EAerial robotics\u003C\u002Fdiv\u003E",url:"s3"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EPhysical human-robot interaction\u003C\u002Fdiv\u003E",url:"s4"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EAI and cognitive robotics\u003C\u002Fdiv\u003E",url:"s5"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EIndustrial robotics\u003C\u002Fdiv\u003E",url:"s6"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EMedical robotics\u003C\u002Fdiv\u003E",url:"s7"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EFuture Directions\u003C\u002Fdiv\u003E",url:"s8"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EConclusion\u003C\u002Fdiv\u003E",url:"s9"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EAuthor contribution\u003C\u002Fdiv\u003E",url:"s10"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EFinancial support\u003C\u002Fdiv\u003E",url:"s11"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003ECompeting 
interests\u003C\u002Fdiv\u003E",url:"s12"},{level:p,current:c,title:"\u003Cdiv class=\"toc-title\"\u003EEthical approval\u003C\u002Fdiv\u003E",url:"s13"}],footnotes:[],fulltextNotes:[],references:[{id:"ref1",displayNumber:"[1]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESuomalainen\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKarayiannidis\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKyrki\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA survey of robot manipulation in contact\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ERobot Auton Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E156\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E104224\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A survey of robot manipulation in contact' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2022.104224\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A survey of robot manipulation in contact' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+survey+of+robot+manipulation+in+contact&author=Suomalainen+M.&author=Karayiannidis+Y.&author=Kyrki+V.&publication+year=2022&journal=Robot+Auton+Syst&volume=156&doi=10.1016%2Fj.robot.2022.104224\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+survey+of+robot+manipulation+in+contact&author=Suomalainen+M.&author=Karayiannidis+Y.&author=Kyrki+V.&publication+year=2022&journal=Robot+Auton+Syst&volume=156&doi=10.1016%2Fj.robot.2022.104224",openUrlParams:{genre:e,atitle:aB,jtitle:r,title:r,volume:"156",artnum:"65f99c3d442f7c0001a347c2",spage:"104224",epage:f,date:j,sid:d,aulast:a,aufirst:a,doi:aC,au:a},innerRefId:"r1",title:aB,doi:aC,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2022.104224",pubMedLink:a}]},{id:"ref2",displayNumber:"[2]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZhang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZeng\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAgrawal\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESreenath\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EDynamic Legged Manipulation of a Ball Through Multi-Contact Optimization\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E7513\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E7520\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Dynamic Legged Manipulation of a Ball Through Multi-Contact Optimization' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Dynamic+Legged+Manipulation+of+a+Ball+Through+Multi-Contact+Optimization&author=Yang+C.&author=Zhang+B.&author=Zeng+J.&author=Agrawal+A.&author=Sreenath+K.&publication+year=2020\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Dynamic+Legged+Manipulation+of+a+Ball+Through+Multi-Contact+Optimization&author=Yang+C.&author=Zhang+B.&author=Zeng+J.&author=Agrawal+A.&author=Sreenath+K.&publication+year=2020",openUrlParams:{genre:h,date:l,sid:d,title:aD},innerRefId:"r2",title:aD,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref3",displayNumber:"[3]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESerra\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDonaire\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBuonocore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EControl of nonprehensile planar rolling manipulation: A passivity-based approach\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E35\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E2\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E317\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E329\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Control of nonprehensile planar rolling manipulation: A passivity-based approach' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2018.2887356\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Control of nonprehensile planar rolling manipulation: A passivity-based approach' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Control+of+nonprehensile+planar+rolling+manipulation%3A+A+passivity-based+approach&author=Serra+D.&author=Ruggiero+F.&author=Donaire+A.&author=Buonocore+L.+R.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2019&journal=IEEE+Trans+Robot&volume=35&doi=10.1109%2FTRO.2018.2887356&pages=317-329\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Control+of+nonprehensile+planar+rolling+manipulation%3A+A+passivity-based+approach&author=Serra+D.&author=Ruggiero+F.&author=Donaire+A.&author=Buonocore+L.+R.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2019&journal=IEEE+Trans+Robot&volume=35&doi=10.1109%2FTRO.2018.2887356&pages=317-329",openUrlParams:{genre:e,atitle:aE,jtitle:m,title:m,volume:aF,artnum:"65f99c3d442f7c0001a347c4",spage:aG,epage:aH,date:k,sid:d,aulast:a,aufirst:a,doi:aI,au:a},innerRefId:"r3",title:aE,doi:aI,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2018.2887356",pubMedLink:a}]},{id:"ref4",displayNumber:"[4]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EChai\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.-Y.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPeng\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW.-H.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETsao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.-L.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EObject rearrangement through planar pushing: A theoretical analysis and validation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE T Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E38\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2703\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2719\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Object rearrangement through planar pushing: A theoretical analysis and validation' 
href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2022.3153785\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Object rearrangement through planar pushing: A theoretical analysis and validation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Object+rearrangement+through+planar+pushing%3A+A+theoretical+analysis+and+validation&author=Chai+C.-Y.&author=Peng+W.-H.&author=Tsao+S.-L.&publication+year=2022&journal=IEEE+T+Robot&volume=38&doi=10.1109%2FTRO.2022.3153785&pages=2703-2719\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Object+rearrangement+through+planar+pushing%3A+A+theoretical+analysis+and+validation&author=Chai+C.-Y.&author=Peng+W.-H.&author=Tsao+S.-L.&publication+year=2022&journal=IEEE+T+Robot&volume=38&doi=10.1109%2FTRO.2022.3153785&pages=2703-2719",openUrlParams:{genre:e,atitle:aJ,jtitle:aK,title:aK,volume:N,artnum:"65f99c3d442f7c0001a347c5",spage:"2703",epage:"2719",date:j,sid:d,aulast:a,aufirst:a,doi:aL,au:a},innerRefId:"r4",title:aJ,doi:aL,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2022.3153785",pubMedLink:a}]},{id:"ref5",displayNumber:"[5]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESatici\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ECoordinate-Free Framework for Robotic Pizza Tossing and Catching\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E3932\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3939\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Coordinate-Free Framework for Robotic Pizza Tossing and Catching' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Coordinate-Free+Framework+for+Robotic+Pizza+Tossing+and+Catching&author=Satici+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2016\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Coordinate-Free+Framework+for+Robotic+Pizza+Tossing+and+Catching&author=Satici+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2016",openUrlParams:{genre:h,date:v,sid:d,title:aM},innerRefId:"r5",title:aM,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref6",displayNumber:"[6]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ENonprehensile dynamic manipulation: A 
survey\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1711\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1718\u003C\u002Fspan\u003E(\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003Eb).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile dynamic manipulation: A survey' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2801939\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile dynamic manipulation: A survey' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+dynamic+manipulation%3A+A+survey&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2801939&pages=1711-1718\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+dynamic+manipulation%3A+A+survey&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2801939&pages=1711-1718",openUrlParams:{genre:e,atitle:aN,jtitle:i,title:i,volume:z,artnum:"65f99c3d442f7c0001a347c7",spage:"1711",epage:"1718",date:n,sid:d,aulast:a,aufirst:a,doi:aO,au:a},innerRefId:"r6",title:aN,doi:aO,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2801939",pubMedLink:a}]},{id:"ref7",displayNumber:"[7]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESardain\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EBessonnet\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EForces acting on a biped robot. center of pressure-zero moment point\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Syst, Man, Cyber - Part A: Syst Humans\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E34\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E630\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E637\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2004\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Forces acting on a biped robot. center of pressure-zero moment point' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTSMCA.2004.832811\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Forces acting on a biped robot. 
center of pressure-zero moment point' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Forces+acting+on+a+biped+robot.+center+of+pressure-zero+moment+point&author=Sardain+P.&author=Bessonnet+G.&publication+year=2004&journal=IEEE+Trans+Syst%2C+Man%2C+Cyber+-+Part+A%3A+Syst+Humans&volume=34&doi=10.1109%2FTSMCA.2004.832811&pages=630-637\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Forces+acting+on+a+biped+robot.+center+of+pressure-zero+moment+point&author=Sardain+P.&author=Bessonnet+G.&publication+year=2004&journal=IEEE+Trans+Syst%2C+Man%2C+Cyber+-+Part+A%3A+Syst+Humans&volume=34&doi=10.1109%2FTSMCA.2004.832811&pages=630-637",openUrlParams:{genre:e,atitle:aP,jtitle:aQ,title:aQ,volume:aR,artnum:"65f99c3d442f7c0001a347c8",spage:"630",epage:"637",date:W,sid:d,aulast:a,aufirst:a,doi:aS,au:a},innerRefId:"r7",title:aP,doi:aS,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTSMCA.2004.832811",pubMedLink:a}]},{id:"ref8",displayNumber:"[8]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFarid\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EReview and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAnnu Rev Control\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E53\u003C\u002Fspan\u003E, \u003Cspan 
class=\"fpage\"\u003E51\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E69\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.arcontrol.2022.04.009\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Review+and+descriptive+investigation+of+the+connection+between+bipedal+locomotion+and+non-prehensile+manipulation&author=Farid+Y.&author=Siciliano+B.&author=Ruggiero+F.&publication+year=2022&journal=Annu+Rev+Control&volume=53&doi=10.1016%2Fj.arcontrol.2022.04.009&pages=51-69\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Review+and+descriptive+investigation+of+the+connection+between+bipedal+locomotion+and+non-prehensile+manipulation&author=Farid+Y.&author=Siciliano+B.&author=Ruggiero+F.&publication+year=2022&journal=Annu+Rev+Control&volume=53&doi=10.1016%2Fj.arcontrol.2022.04.009&pages=51-69",openUrlParams:{genre:e,atitle:aT,jtitle:aU,title:aU,volume:aV,artnum:"65f99c3d442f7c0001a347c9",spage:aW,epage:aX,date:j,sid:d,aulast:a,aufirst:a,doi:aY,au:a},innerRefId:"r8",title:aT,doi:aY,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.arcontrol.2022.04.009",pubMedLink:a}]},{id:"ref9",displayNumber:"[9]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMcGeer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassive dynamic 
walking\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Robot Res\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E9\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E2\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E62\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E82\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E1990\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passive dynamic walking' href=https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F027836499000900206\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passive dynamic walking' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+dynamic+walking&author=McGeer+T.&publication+year=1990&journal=Int+J+Robot+Res&volume=9&doi=10.1177%2F027836499000900206&pages=62-82\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+dynamic+walking&author=McGeer+T.&publication+year=1990&journal=Int+J+Robot+Res&volume=9&doi=10.1177%2F027836499000900206&pages=62-82",openUrlParams:{genre:e,atitle:aZ,jtitle:w,title:w,volume:O,artnum:"65f99c3d442f7c0001a347ca",spage:a_,epage:"82",date:"1990",sid:d,aulast:a,aufirst:a,doi:a$,au:a},innerRefId:"r9",title:aZ,doi:a$,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F027836499000900206",pubMedLink:a}]},{id:"ref10",displayNumber:"[10]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHolm\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESpong\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EKinetic Energy Shaping for Gait Regulation of 
Underactuated Bipeds\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International conference on control applications\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-loc\"\u003ESan Antonio, Texas, USA\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2008\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E1232\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1238\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Kinetic Energy Shaping for Gait Regulation of Underactuated Bipeds' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Kinetic+Energy+Shaping+for+Gait+Regulation+of+Underactuated+Bipeds&author=Holm+J.&author=Spong+M.&publication+year=2008\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Kinetic+Energy+Shaping+for+Gait+Regulation+of+Underactuated+Bipeds&author=Holm+J.&author=Spong+M.&publication+year=2008",openUrlParams:{genre:h,date:G,sid:d,title:ba},innerRefId:"r10",title:ba,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref11",displayNumber:"[11]",existInContent:c,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESpong\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBullo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EControlled Symmetries and Passive Walking\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EProceeding IFAC Triennal World Congress\u003C\u002Fspan\u003E, (\u003Cspan class=\"publisher-loc\"\u003EBarcelona, Spain\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2002\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Proceeding IFAC Triennal 
World Congress' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Proceeding+IFAC+Triennal+World+Congress&author=Spong+M.&author=Bullo+F.&publication+year=2002\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Proceeding+IFAC+Triennal+World+Congress&author=Spong+M.&author=Bullo+F.&publication+year=2002",openUrlParams:{genre:o,btitle:X,title:X,atitle:"Controlled Symmetries and Passive Walking",aulast:a,aufirst:a,au:a,pub:f,date:Y,spage:f,epage:f,doi:a,sid:d},innerRefId:"r11",title:X,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref12",displayNumber:"[12]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESpong\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHolm\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELee\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EPassivity-Based Control of Bipedal Locomotion\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EIEEE Robotics & Automation Magazine\u003C\u002Fspan\u003E, vol. \u003Cspan class=\"volume\"\u003E12\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2007\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E30\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E40\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for IEEE Robotics & Automation Magazine' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=IEEE+Robotics+%26+Automation+Magazine&author=Spong+M.&author=Holm+J.&author=Lee+D.&publication+year=2007&pages=30-40\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=IEEE+Robotics+%26+Automation+Magazine&author=Spong+M.&author=Holm+J.&author=Lee+D.&publication+year=2007&pages=30-40",openUrlParams:{genre:o,btitle:Z,title:Z,atitle:"Passivity-Based Control of Bipedal Locomotion",aulast:a,aufirst:a,au:a,pub:f,date:_,spage:bb,epage:bc,doi:a,sid:d},innerRefId:"r12",title:Z,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref13",displayNumber:"[13]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETian\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZhao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ENovel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EMech Mach Theory\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E151\u003C\u002Fspan\u003E, 
\u003Cspan class=\"fpage\"\u003E103897\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Novel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2020.103897\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Novel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Novel+method+for+preventing+shin-collisions+in+six-legged+robots+by+utilising+a+robot%E2%80%93terrain+interference+model&author=Mao+L.&author=Gao+F.&author=Tian+Y.&author=Zhao+Y.&publication+year=2020&journal=Mech+Mach+Theory&volume=151&doi=10.1016%2Fj.mechmachtheory.2020.103897\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Novel+method+for+preventing+shin-collisions+in+six-legged+robots+by+utilising+a+robot%E2%80%93terrain+interference+model&author=Mao+L.&author=Gao+F.&author=Tian+Y.&author=Zhao+Y.&publication+year=2020&journal=Mech+Mach+Theory&volume=151&doi=10.1016%2Fj.mechmachtheory.2020.103897",openUrlParams:{genre:e,atitle:bd,jtitle:y,title:y,volume:be,artnum:"65f99c3d442f7c0001a347ce",spage:"103897",epage:f,date:l,sid:d,aulast:a,aufirst:a,doi:bf,au:a},innerRefId:"r13",title:bd,doi:bf,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2020.103897",pubMedLink:a}]},{id:"ref14",displayNumber:"[14]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBledt\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EWensing\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIngersoll\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EContact Model Fusion for Event-Based Locomotion in Unstructured Terrains\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation (ICRA) 2018\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E4399\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4406\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Contact Model Fusion for Event-Based Locomotion in Unstructured Terrains' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Contact+Model+Fusion+for+Event-Based+Locomotion+in+Unstructured+Terrains&author=Bledt+G.&author=Wensing+P.+M.&author=Ingersoll+S.&author=Kim+S.&publication+year=2018\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Contact+Model+Fusion+for+Event-Based+Locomotion+in+Unstructured+Terrains&author=Bledt+G.&author=Wensing+P.+M.&author=Ingersoll+S.&author=Kim+S.&publication+year=2018",openUrlParams:{genre:h,date:n,sid:d,title:bg},innerRefId:"r14",title:bg,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref15",displayNumber:"[15]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFahmi\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMastalli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFocchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESemini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassive whole-body control for quadruped robots: Experimental validation over challenging terrain\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E4\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2553\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2560\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passive whole-body control for quadruped robots: Experimental validation over challenging terrain' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2019.2908502\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passive whole-body control for quadruped robots: Experimental validation over challenging terrain' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+whole-body+control+for+quadruped+robots%3A+Experimental+validation+over+challenging+terrain&author=Fahmi+S.&author=Mastalli+C.&author=Focchi+M.&author=Semini+C.&publication+year=2019&journal=IEEE+Robot+Auto+Lett&volume=4&doi=10.1109%2FLRA.2019.2908502&pages=2553-2560\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+whole-body+control+for+quadruped+robots%3A+Experimental+validation+over+challenging+terrain&author=Fahmi+S.&author=Mastalli+C.&author=Focchi+M.&author=Semini+C.&publication+year=2019&journal=IEEE+Robot+Auto+Lett&volume=4&doi=10.1109%2FLRA.2019.2908502&pages=2553-2560",openUrlParams:{genre:e,atitle:bh,jtitle:i,title:i,volume:bi,artnum:"65f99c3d442f7c0001a347d0",spage:"2553",epage:"2560",date:k,sid:d,aulast:a,aufirst:a,doi:bj,au:a},innerRefId:"r15",title:bh,doi:bj,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2019.2908502",pubMedLink:a}]},{id:"ref16",displayNumber:"[16]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBertoncelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESabattini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ELinear Time-Varying mpc for Nonprehensile Object Manipulation with a Nonholonomic Mobile Robot\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation (ICRA)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E11032\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E11038\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Linear Time-Varying mpc for Nonprehensile Object Manipulation with a Nonholonomic Mobile Robot' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Linear+Time-Varying+mpc+for+Nonprehensile+Object+Manipulation+with+a+Nonholonomic+Mobile+Robot&author=Bertoncelli+F.&author=Ruggiero+F.&author=Sabattini+L.&publication+year=2020\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Linear+Time-Varying+mpc+for+Nonprehensile+Object+Manipulation+with+a+Nonholonomic+Mobile+Robot&author=Bertoncelli+F.&author=Ruggiero+F.&author=Sabattini+L.&publication+year=2020",openUrlParams:{genre:h,date:l,sid:d,title:bk},innerRefId:"r16",title:bk,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref17",displayNumber:"[17]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBertoncelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESabattini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ETask-Oriented Contact Optimization for Pushing Manipulation with Mobile Robots\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International 
Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E1639\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1646\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Task-Oriented Contact Optimization for Pushing Manipulation with Mobile Robots' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Task-Oriented+Contact+Optimization+for+Pushing+Manipulation+with+Mobile+Robots&author=Bertoncelli+F.&author=Selvaggio+M.&author=Ruggiero+F.&author=Sabattini+L.&publication+year=2022\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Task-Oriented+Contact+Optimization+for+Pushing+Manipulation+with+Mobile+Robots&author=Bertoncelli+F.&author=Selvaggio+M.&author=Ruggiero+F.&author=Sabattini+L.&publication+year=2022",openUrlParams:{genre:h,date:j,sid:d,title:bl},innerRefId:"r17",title:bl,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref18",displayNumber:"[18]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMorlando\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ENonprehensile Object Transportation with a Legged Manipulator\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EInternational Conference on Robotics and Automation (ICRA)\u003C\u002Fem\u003E, \u003Cspan 
class=\"year\"\u003E2022\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E6628\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E6634\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile Object Transportation with a Legged Manipulator' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA46639.2022.9811810\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile Object Transportation with a Legged Manipulator' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+Object+Transportation+with+a+Legged+Manipulator&author=Morlando+V.&author=Selvaggio+M.&author=Ruggiero+F.&publication+year=2022\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+Object+Transportation+with+a+Legged+Manipulator&author=Morlando+V.&author=Selvaggio+M.&author=Ruggiero+F.&publication+year=2022",openUrlParams:{genre:h,date:j,sid:d,title:bm},innerRefId:"r18",title:bm,doi:"10.1109\u002FICRA46639.2022.9811810",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA46639.2022.9811810",pubMedLink:a}]},{id:"ref19",displayNumber:"[19]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPacchierotti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGiordano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA shared-control teleoperation architecture for nonprehensile object transportation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E38\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E569\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E583\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A shared-control teleoperation architecture for nonprehensile object transportation' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2021.3086773\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A shared-control teleoperation architecture for nonprehensile object transportation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+shared-control+teleoperation+architecture+for+nonprehensile+object+transportation&author=Selvaggio+M.&author=Cacace+J.&author=Pacchierotti+C.&author=Ruggiero+F.&author=Giordano+P.+R.&publication+year=2022&journal=IEEE+Trans+Robot&volume=38&doi=10.1109%2FTRO.2021.3086773&pages=569-583\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+shared-control+teleoperation+architecture+for+nonprehensile+object+transportation&author=Selvaggio+M.&author=Cacace+J.&author=Pacchierotti+C.&author=Ruggiero+F.&author=Giordano+P.+R.&publication+year=2022&journal=IEEE+Trans+Robot&volume=38&doi=10.1109%2FTRO.2021.3086773&pages=569-583",openUrlParams:{genre:e,atitle:bn,jtitle:m,title:m,volume:N,artnum:"65f99c3d442f7c0001a347d4",spage:"569",epage:"583",date:j,sid:d,aulast:a,aufirst:a,doi:bo,au:a},innerRefId:"r19",title:bn,doi:bo,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2021.3086773",pubMedLink:a}]},{id:"ref20",displayNumber:"[20]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGarg\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOriolo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ENon-prehensile object transportation via model predictive non-sliding manipulation control\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Contr Syst T\u003C\u002Fspan\u003E \u003Cspan 
class=\"volume\"\u003E31\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2231\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2244\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Non-prehensile object transportation via model predictive non-sliding manipulation control' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCST.2023.3277224\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Non-prehensile object transportation via model predictive non-sliding manipulation control' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Non-prehensile+object+transportation+via+model+predictive+non-sliding+manipulation+control&author=Selvaggio+M.&author=Garg+A.&author=Ruggiero+F.&author=Oriolo+G.&author=Siciliano+B.&publication+year=2023&journal=IEEE+Trans+Contr+Syst+T&volume=31&doi=10.1109%2FTCST.2023.3277224&pages=2231-2244\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Non-prehensile+object+transportation+via+model+predictive+non-sliding+manipulation+control&author=Selvaggio+M.&author=Garg+A.&author=Ruggiero+F.&author=Oriolo+G.&author=Siciliano+B.&publication+year=2023&journal=IEEE+Trans+Contr+Syst+T&volume=31&doi=10.1109%2FTCST.2023.3277224&pages=2231-2244",openUrlParams:{genre:e,atitle:bp,jtitle:bq,title:bq,volume:br,artnum:"65f99c3d442f7c0001a347d5",spage:"2231",epage:"2244",date:q,sid:d,aulast:a,aufirst:a,doi:bs,au:a},innerRefId:"r20",title:bp,doi:bs,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCST.2023.3277224",pubMedLink:a}]},{id:"ref21",displayNumber:"[21]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESubburaman\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA non-prehensile object transportation framework with adaptive tilting based on quadratic programming\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E8\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E6\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E3581\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3588\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A non-prehensile object transportation framework with adaptive tilting based on quadratic programming' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2023.3268594\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A non-prehensile object transportation framework with adaptive tilting based on quadratic programming' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+non-prehensile+object+transportation+framework+with+adaptive+tilting+based+on+quadratic+programming&author=Subburaman+R.&author=Selvaggio+M.&author=Ruggiero+F.&publication+year=2023&journal=IEEE+Robot+Auto+Lett&volume=8&doi=10.1109%2FLRA.2023.3268594&pages=3581-3588\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+non-prehensile+object+transportation+framework+with+adaptive+tilting+based+on+quadratic+programming&author=Subburaman+R.&author=Selvaggio+M.&author=Ruggiero+F.&publication+year=2023&journal=IEEE+Robot+Auto+Lett&volume=8&doi=10.1109%2FLRA.2023.3268594&pages=3581-3588",openUrlParams:{genre:e,atitle:bt,jtitle:i,title:i,volume:$,artnum:"65f99c3d442f7c0001a347d6",spage:"3581",epage:"3588",date:q,sid:d,aulast:a,aufirst:a,doi:bu,au:a},innerRefId:"r21",title:bt,doi:bu,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2023.3268594",pubMedLink:a}]},{id:"ref22",displayNumber:"[22]",existInContent:c,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDonaire\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBuonocore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Contr Syst Tech\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E25\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E6\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2135\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2142\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCST.2016.2637719\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+control+for+a+rolling-balancing+system%3A+The+nonprehensile+disk-on-disk&author=Donaire+A.&author=Ruggiero+F.&author=Buonocore+L.+R.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2017&journal=IEEE+Trans+Contr+Syst+Tech&volume=25&doi=10.1109%2FTCST.2016.2637719&pages=2135-2142\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+control+for+a+rolling-balancing+system%3A+The+nonprehensile+disk-on-disk&author=Donaire+A.&author=Ruggiero+F.&author=Buonocore+L.+R.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2017&journal=IEEE+Trans+Contr+Syst+Tech&volume=25&doi=10.1109%2FTCST.2016.2637719&pages=2135-2142",openUrlParams:{genre:e,atitle:bv,jtitle:P,title:P,volume:bw,artnum:"65f99c3d442f7c0001a347d7",spage:"2135",epage:"2142",date:A,sid:d,aulast:a,aufirst:a,doi:bx,au:a},innerRefId:"r22",title:bv,doi:bx,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCST.2016.2637719",pubMedLink:a}]},{id:"ref23",displayNumber:"[23]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBuonocore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA new laparoscopic tool with in-hand rolling capabilities for needle reorientation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2354\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2361\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A new laparoscopic tool with in-hand rolling capabilities for needle reorientation' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2809443\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A new laparoscopic tool with in-hand rolling capabilities for needle reorientation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+new+laparoscopic+tool+with+in-hand+rolling+capabilities+for+needle+reorientation&author=Fontanelli+G.+A.&author=Selvaggio+M.&author=Buonocore+L.+R.&author=Ficuciello+F.&author=Villani+L.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Lett&volume=3&doi=10.1109%2FLRA.2018.2809443&pages=2354-2361\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+new+laparoscopic+tool+with+in-hand+rolling+capabilities+for+needle+reorientation&author=Fontanelli+G.+A.&author=Selvaggio+M.&author=Buonocore+L.+R.&author=Ficuciello+F.&author=Villani+L.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Lett&volume=3&doi=10.1109%2FLRA.2018.2809443&pages=2354-2361",openUrlParams:{genre:e,atitle:by,jtitle:s,title:s,volume:z,artnum:"65f99c3d442f7c0001a347d8",spage:"2354",epage:"2361",date:n,sid:d,aulast:a,aufirst:a,doi:bz,au:a},innerRefId:"r23",title:by,doi:bz,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2809443",pubMedLink:a}]},{id:"ref24",displayNumber:"[24]",existInContent:c,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGutiérrez-Giles\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ENonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E2\u003C\u002Fspan\u003E), \u003Cspan 
class=\"fpage\"\u003E1136\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1143\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2792403\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+manipulation+of+an+underactuated+mechanical+system+with+second-order+nonholonomic+constraints%3A+The+robotic+hula-hoop&author=Guti%C3%A9rrez-Giles+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Lett&volume=3&doi=10.1109%2FLRA.2018.2792403&pages=1136-1143\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+manipulation+of+an+underactuated+mechanical+system+with+second-order+nonholonomic+constraints%3A+The+robotic+hula-hoop&author=Guti%C3%A9rrez-Giles+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Lett&volume=3&doi=10.1109%2FLRA.2018.2792403&pages=1136-1143",openUrlParams:{genre:e,atitle:bA,jtitle:s,title:s,volume:z,artnum:"65f99c3d442f7c0001a347d9",spage:"1136",epage:"1143",date:n,sid:d,aulast:a,aufirst:a,doi:bB,au:a},innerRefId:"r24",title:bA,doi:bB,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2792403",pubMedLink:a}]},{id:"ref25",displayNumber:"[25]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, 
\u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESantamaria-Navarro\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAndrade-Cetto\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETrujillo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEsteves\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EViguria\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EHybrid visual servoing with hierarchical task composition for aerial manipulation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E1\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E259\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E266\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Hybrid visual servoing with hierarchical task composition for aerial manipulation' 
href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2015.2510749\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Hybrid visual servoing with hierarchical task composition for aerial manipulation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Hybrid+visual+servoing+with+hierarchical+task+composition+for+aerial+manipulation&author=Lippiello+V.&author=Cacace+J.&author=Santamaria-Navarro+A.&author=Andrade-Cetto+J.&author=Trujillo+M.&author=Esteves+Y.&author=Viguria+A.&publication+year=2016&journal=IEEE+Robot+Auto+Lett&volume=1&doi=10.1109%2FLRA.2015.2510749&pages=259-266\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Hybrid+visual+servoing+with+hierarchical+task+composition+for+aerial+manipulation&author=Lippiello+V.&author=Cacace+J.&author=Santamaria-Navarro+A.&author=Andrade-Cetto+J.&author=Trujillo+M.&author=Esteves+Y.&author=Viguria+A.&publication+year=2016&journal=IEEE+Robot+Auto+Lett&volume=1&doi=10.1109%2FLRA.2015.2510749&pages=259-266",openUrlParams:{genre:e,atitle:bC,jtitle:i,title:i,volume:E,artnum:"65f99c3d442f7c0001a347da",spage:"259",epage:"266",date:v,sid:d,aulast:a,aufirst:a,doi:bD,au:a},innerRefId:"r25",title:bC,doi:bD,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2015.2510749",pubMedLink:a}]},{id:"ref26",displayNumber:"[26]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERyu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.-C.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELynch\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK. 
M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EControl of nonprehensile rolling manipulation: Balancing a disk on a disk\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E29\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1152\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1161\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2013\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Control of nonprehensile rolling manipulation: Balancing a disk on a disk' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2013.2262775\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Control of nonprehensile rolling manipulation: Balancing a disk on a disk' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Control+of+nonprehensile+rolling+manipulation%3A+Balancing+a+disk+on+a+disk&author=Ryu+J.-C.&author=Ruggiero+F.&author=Lynch+K.+M.&publication+year=2013&journal=IEEE+Trans+Robot&volume=29&doi=10.1109%2FTRO.2013.2262775&pages=1152-1161\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Control+of+nonprehensile+rolling+manipulation%3A+Balancing+a+disk+on+a+disk&author=Ryu+J.-C.&author=Ruggiero+F.&author=Lynch+K.+M.&publication+year=2013&journal=IEEE+Trans+Robot&volume=29&doi=10.1109%2FTRO.2013.2262775&pages=1152-1161",openUrlParams:{genre:e,atitle:bE,jtitle:m,title:m,volume:"29",artnum:"65f99c3d442f7c0001a347db",spage:"1152",epage:"1161",date:Q,sid:d,aulast:a,aufirst:a,doi:bF,au:a},innerRefId:"r26",title:bE,doi:bF,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2013.2262775",pubMedLink:a}]},{id:"ref27",displayNumber:"[27]",existInContent:b,content:"\u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESerra\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFerguson\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiniscalco\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPetit\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EOn the Experiments about the Nonprehensile Reconfiguration of a Rolling Sphere on a Plate\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003E26th Mediterranean Conference on Control and Automation (MED)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E13\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E20\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for On the Experiments about the Nonprehensile Reconfiguration of a Rolling Sphere on a Plate' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=On+the+Experiments+about+the+Nonprehensile+Reconfiguration+of+a+Rolling+Sphere+on+a+Plate&author=Serra+D.&author=Ferguson+J.&author=Ruggiero+F.&author=Siniscalco+A.&author=Petit+A.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=On+the+Experiments+about+the+Nonprehensile+Reconfiguration+of+a+Rolling+Sphere+on+a+Plate&author=Serra+D.&author=Ferguson+J.&author=Ruggiero+F.&author=Siniscalco+A.&author=Petit+A.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2018",openUrlParams:{genre:h,date:n,sid:d,title:bG},innerRefId:"r27",title:bG,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref28",displayNumber:"[28]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGutiérrez-Giles\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ECosed-Loop Control of a Nonprehensile Manipulation 
System Inspired by a Pizza-Peel Mechanism\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EEuropean Control Conference\u003C\u002Fem\u003E, ( \u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E1580\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1585\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Cosed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism' href=https:\u002F\u002Fdx.doi.org\u002F10.23919\u002FECC.2019.8796077\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Cosed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Cosed-Loop+Control+of+a+Nonprehensile+Manipulation+System+Inspired+by+a+Pizza-Peel+Mechanism&author=Guti%C3%A9rrez-Giles+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Cosed-Loop+Control+of+a+Nonprehensile+Manipulation+System+Inspired+by+a+Pizza-Peel+Mechanism&author=Guti%C3%A9rrez-Giles+A.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:bH},innerRefId:"r28",title:bH,doi:"10.23919\u002FECC.2019.8796077",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.23919\u002FECC.2019.8796077",pubMedLink:a}]},{id:"ref29",displayNumber:"[29]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.-T.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EPlanning Framework for Robotic Pizza Dough Stretching with a Rolling Pin\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003ERobot Dynamic Manipulation. Perception of Deformable Objects and Nonprehensile Manipulation Control\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E eds.),\u003Cspan class=\"volume\"\u003E144\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003ECham\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E229\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E253\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Robot Dynamic Manipulation. Perception of Deformable Objects and Nonprehensile Manipulation Control' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-3-030-93290-9_9\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Robot Dynamic Manipulation. 
Perception of Deformable Objects and Nonprehensile Manipulation Control' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robot+Dynamic+Manipulation.+Perception+of+Deformable+Objects+and+Nonprehensile+Manipulation+Control&author=Kim+J.-T.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&author=Siciliano+B.&author=Ruggiero+F.&publication+year=2022&pages=229-253\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robot+Dynamic+Manipulation.+Perception+of+Deformable+Objects+and+Nonprehensile+Manipulation+Control&author=Kim+J.-T.&author=Ruggiero+F.&author=Lippiello+V.&author=Siciliano+B.&author=Siciliano+B.&author=Ruggiero+F.&publication+year=2022&pages=229-253",openUrlParams:{genre:o,btitle:aa,title:aa,atitle:"Planning Framework for Robotic Pizza Dough Stretching with a Rolling Pin",aulast:a,aufirst:a,au:a,pub:B,date:j,spage:"229",epage:"253",doi:bI,sid:d},innerRefId:"r29",title:aa,doi:bI,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-3-030-93290-9_9",pubMedLink:a}]},{id:"ref30",displayNumber:"[30]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.-T.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGutiérrez-Giles\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESatici\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EDonaire\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBuonocore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003ENonprehensile Manipulation Control and Task Planning for Deformable Object Manipulation: Results From the RoDyMan Project\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EInformatics in Control, Automation and Robotics, Lecture Notes in Electrical Engineering\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGusikhin\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EO.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMadani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003ECham\u003C\u002Fspan\u003E, \u003Cspan 
class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E76\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E100\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Informatics in Control, Automation and Robotics, Lecture Notes in Electrical Engineering' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Informatics+in+Control%2C+Automation+and+Robotics%2C+Lecture+Notes+in+Electrical+Engineering&author=Ruggiero+F.&author=Kim+J.-T.&author=Guti%C3%A9rrez-Giles+A.&author=Satici+A.&author=Donaire+A.&author=Cacace+J.&author=Buonocore+L.+R.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Siciliano+B.&author=Gusikhin+O.&author=Madani+K.&publication+year=2020&pages=76-100\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Informatics+in+Control%2C+Automation+and+Robotics%2C+Lecture+Notes+in+Electrical+Engineering&author=Ruggiero+F.&author=Kim+J.-T.&author=Guti%C3%A9rrez-Giles+A.&author=Satici+A.&author=Donaire+A.&author=Cacace+J.&author=Buonocore+L.+R.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Siciliano+B.&author=Gusikhin+O.&author=Madani+K.&publication+year=2020&pages=76-100",openUrlParams:{genre:o,btitle:ab,title:ab,atitle:"Nonprehensile Manipulation Control and Task Planning for Deformable Object Manipulation: Results From the RoDyMan Project",aulast:a,aufirst:a,au:a,pub:B,date:l,spage:"76",epage:"100",doi:a,sid:d},innerRefId:"r30",title:ab,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref31",displayNumber:"[31]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPetit\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESerra\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESatici\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. C.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDonaire\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBuonocore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ENonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Mag\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E25\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E83\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E92\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003Ec).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FMRA.2017.2781306\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+manipulation+of+deformable+objects%3A+Achievements+and+perspectives+from+the+roDyMan+project&author=Ruggiero+F.&author=Petit+A.&author=Serra+D.&author=Satici+A.+C.&author=Cacace+J.&author=Donaire+A.&author=Ficuciello+F.&author=Buonocore+L.+R.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Mag&volume=25&doi=10.1109%2FMRA.2017.2781306&pages=83-92\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Nonprehensile+manipulation+of+deformable+objects%3A+Achievements+and+perspectives+from+the+roDyMan+project&author=Ruggiero+F.&author=Petit+A.&author=Serra+D.&author=Satici+A.+C.&author=Cacace+J.&author=Donaire+A.&author=Ficuciello+F.&author=Buonocore+L.+R.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Autom+Mag&volume=25&doi=10.1109%2FMRA.2017.2781306&pages=83-92",openUrlParams:{genre:e,atitle:bJ,jtitle:R,title:R,volume:bw,artnum:"65f99c3d442f7c0001a347e0",spage:"83",epage:"92",date:n,sid:d,aulast:a,aufirst:a,doi:bK,au:a},innerRefId:"r31",title:bJ,doi:bK,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FMRA.2017.2781306",pubMedLink:a}]},{id:"ref32",displayNumber:"[32]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDonaire\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E 
and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EEnergy Pumping-and-Damping for Gait Robustification of Underactuated Planar Biped Robots Within the Hybrid Zero Dynamics Framework\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE-RAS 20th International Conference on Humanoid Robots (Humanoids)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E415\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E421\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Energy Pumping-and-Damping for Gait Robustification of Underactuated Planar Biped Robots Within the Hybrid Zero Dynamics Framework' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Energy+Pumping-and-Damping+for+Gait+Robustification+of+Underactuated+Planar+Biped+Robots+Within+the+Hybrid+Zero+Dynamics+Framework&author=Arpenti+P.&author=Donaire+A.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Energy+Pumping-and-Damping+for+Gait+Robustification+of+Underactuated+Planar+Biped+Robots+Within+the+Hybrid+Zero+Dynamics+Framework&author=Arpenti+P.&author=Donaire+A.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020",openUrlParams:{genre:h,date:l,sid:d,title:bL},innerRefId:"r32",title:bL,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref33",displayNumber:"[33]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EDonaire\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EUniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003Ea) pp. \u003Cspan class=\"fpage\"\u003E6739\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E6745\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS47612.2022.9981206\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Uniform+global+exponential+stabilizing+passivity-based+tracking+controller+applied+to+planar+biped+robots&author=Arpenti+P.&author=Donaire+A.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Uniform+global+exponential+stabilizing+passivity-based+tracking+controller+applied+to+planar+biped+robots&author=Arpenti+P.&author=Donaire+A.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022",openUrlParams:{genre:h,date:j,sid:d,title:bM},innerRefId:"r33",title:bM,doi:"10.1109\u002FIROS47612.2022.9981206",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS47612.2022.9981206",pubMedLink:a}]},{id:"ref34",displayNumber:"[34]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EInterconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation (ICRA)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003Eb) pp. 
\u003Cspan class=\"fpage\"\u003E9802\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E9808\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA40945.2020.9196598\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Interconnection+and+Damping+Assignment+Passivity-Based+Control+for+Gait+Generation+in+Underactuated+Compass-like+Robots&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Interconnection+and+Damping+Assignment+Passivity-Based+Control+for+Gait+Generation+in+Underactuated+Compass-like+Robots&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020",openUrlParams:{genre:h,date:l,sid:d,title:bN},innerRefId:"r34",title:bN,doi:"10.1109\u002FICRA40945.2020.9196598",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA40945.2020.9196598",pubMedLink:a}]},{id:"ref35",displayNumber:"[35]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Control, Auto Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E20\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E283\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E297\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003Eb).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs12555-020-0839-1\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+constructive+methodology+for+the+IDA-PBC+of+underactuated+2-doF+mechanical+systems+with+explicit+solution+of+PDEs&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Int+J+Control%2C+Auto+Syst&volume=20&doi=10.1007%2Fs12555-020-0839-1&pages=283-297\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+constructive+methodology+for+the+IDA-PBC+of+underactuated+2-doF+mechanical+systems+with+explicit+solution+of+PDEs&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Int+J+Control%2C+Auto+Syst&volume=20&doi=10.1007%2Fs12555-020-0839-1&pages=283-297",openUrlParams:{genre:e,atitle:bO,jtitle:bP,title:bP,volume:"20",artnum:"65f99c3d442f7c0001a347e4",spage:"283",epage:bQ,date:j,sid:d,aulast:a,aufirst:a,doi:bR,au:a},innerRefId:"r35",title:bO,doi:bR,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs12555-020-0839-1",pubMedLink:a}]},{id:"ref36",displayNumber:"[36]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENacusse\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EGait Generation for Underactuated Compass-Like Robots Using Dissipative Forces in the Controller\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EIFAC-PapersOnLine\u003C\u002Fspan\u003E. vol. \u003Cspan class=\"volume\"\u003E53\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003EIFAC World Congress\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E9023\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E9030\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for IFAC-PapersOnLine' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=IFAC-PapersOnLine&author=Nacusse+M.&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020&pages=9023-9030\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=IFAC-PapersOnLine&author=Nacusse+M.&author=Arpenti+P.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2020&pages=9023-9030",openUrlParams:{genre:o,btitle:H,title:H,atitle:"Gait Generation for Underactuated Compass-Like Robots Using Dissipative Forces in the Controller",aulast:a,aufirst:a,au:a,pub:"IFAC World Congress",date:l,spage:"9023",epage:"9030",doi:a,sid:d},innerRefId:"r36",title:H,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref37",displayNumber:"[37]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMorlando\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ETethering a Human with a Quadruped Robot: A Guide Dog to Help Visually Impaired People\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003E31st Mediterranean Conference on Control and Automation (MED) 2023\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E547\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E553\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Tethering a Human with a Quadruped Robot: A Guide Dog to Help Visually Impaired People' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Tethering+a+Human+with+a+Quadruped+Robot%3A+A+Guide+Dog+to+Help+Visually+Impaired+People&author=Morlando+V.&author=Lippiello+V.&author=Ruggiero+F.&publication+year=2023\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Tethering+a+Human+with+a+Quadruped+Robot%3A+A+Guide+Dog+to+Help+Visually+Impaired+People&author=Morlando+V.&author=Lippiello+V.&author=Ruggiero+F.&publication+year=2023",openUrlParams:{genre:h,date:q,sid:d,title:bS},innerRefId:"r37",title:bS,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref38",displayNumber:"[38]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMorlando\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EDisturbance Rejection for Legged Robots Through a Hybrid Observer\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003E30th Mediterranean Conference on Control and Automation (MED)\u003C\u002Fem\u003E, \u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E743\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E748\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Disturbance Rejection for Legged Robots Through a Hybrid Observer' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FMED54222.2022.9837169\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Disturbance Rejection for Legged Robots Through a Hybrid Observer' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Disturbance+Rejection+for+Legged+Robots+Through+a+Hybrid+Observer&author=Morlando+V.&author=Ruggiero+F.&publication+year=2022\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Disturbance+Rejection+for+Legged+Robots+Through+a+Hybrid+Observer&author=Morlando+V.&author=Ruggiero+F.&publication+year=2022",openUrlParams:{genre:h,date:j,sid:d,title:bT},innerRefId:"r38",title:bT,doi:"10.1109\u002FMED54222.2022.9837169",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FMED54222.2022.9837169",pubMedLink:a}]},{id:"ref39",displayNumber:"[39]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMorlando\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETeimoorzadeh\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EWhole-body control with disturbance rejection through a momentum-based observer for quadruped robots\u003C\u002Fspan\u003E,” \u003Cspan 
class=\"source\"\u003EMech Mach Theory\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E164\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E104412\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2021.104412\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Whole-body+control+with+disturbance+rejection+through+a+momentum-based+observer+for+quadruped+robots&author=Morlando+V.&author=Teimoorzadeh+A.&author=Ruggiero+F.&publication+year=2021&journal=Mech+Mach+Theory&volume=164&doi=10.1016%2Fj.mechmachtheory.2021.104412\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Whole-body+control+with+disturbance+rejection+through+a+momentum-based+observer+for+quadruped+robots&author=Morlando+V.&author=Teimoorzadeh+A.&author=Ruggiero+F.&publication+year=2021&journal=Mech+Mach+Theory&volume=164&doi=10.1016%2Fj.mechmachtheory.2021.104412",openUrlParams:{genre:e,atitle:bU,jtitle:y,title:y,volume:"164",artnum:"65f99c3d442f7c0001a347e8",spage:"104412",epage:f,date:t,sid:d,aulast:a,aufirst:a,doi:bV,au:a},innerRefId:"r39",title:bU,doi:bV,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2021.104412",pubMedLink:a}]},{id:"ref40",displayNumber:"[40]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EThe effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E1\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E492\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E499\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003Eb).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2016.2519147\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+effect+of+shapes+in+input-state+linearization+for+stabilization+of+nonprehensile+planar+rolling+dynamic+manipulation&author=Lippiello+V.&author=Ruggiero+F.&author=Siciliano+B.&publication+year=2016&journal=IEEE+Robot+Autom+Lett&volume=1&doi=10.1109%2FLRA.2016.2519147&pages=492-499\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+effect+of+shapes+in+input-state+linearization+for+stabilization+of+nonprehensile+planar+rolling+dynamic+manipulation&author=Lippiello+V.&author=Ruggiero+F.&author=Siciliano+B.&publication+year=2016&journal=IEEE+Robot+Autom+Lett&volume=1&doi=10.1109%2FLRA.2016.2519147&pages=492-499",openUrlParams:{genre:e,atitle:bW,jtitle:s,title:s,volume:E,artnum:"65f99c3d442f7c0001a347e9",spage:"492",epage:"499",date:v,sid:d,aulast:a,aufirst:a,doi:bX,au:a},innerRefId:"r40",title:bW,doi:bX,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2016.2519147",pubMedLink:a}]},{id:"ref41",displayNumber:"[41]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHeins\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESchoellig\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. 
P.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EKeep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E8\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E12\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E7986\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E7993\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2023.3324520\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Keep+it+upright%3A+Model+predictive+control+for+nonprehensile+object+transportation+with+obstacle+avoidance+on+a+mobile+manipulator&author=Heins+A.&author=Schoellig+A.+P.&publication+year=2023&journal=IEEE+Robot+Auto+Lett&volume=8&doi=10.1109%2FLRA.2023.3324520&pages=7986-7993\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Keep+it+upright%3A+Model+predictive+control+for+nonprehensile+object+transportation+with+obstacle+avoidance+on+a+mobile+manipulator&author=Heins+A.&author=Schoellig+A.+P.&publication+year=2023&journal=IEEE+Robot+Auto+Lett&volume=8&doi=10.1109%2FLRA.2023.3324520&pages=7986-7993",openUrlParams:{genre:e,atitle:bY,jtitle:i,title:i,volume:$,artnum:"65f99c3d442f7c0001a347ea",spage:"7986",epage:"7993",date:q,sid:d,aulast:a,aufirst:a,doi:bZ,au:a},innerRefId:"r41",title:bY,doi:bZ,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2023.3324520",pubMedLink:a}]},{id:"ref42",displayNumber:"[42]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMuchacho\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. I. C.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELaha\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFigueredo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. F.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHaddadin\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA Solution to Slosh-Free Robot Trajectory Optimization\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E223\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E230\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Solution to Slosh-Free Robot Trajectory Optimization' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Solution+to+Slosh-Free+Robot+Trajectory+Optimization&author=Muchacho+R.+I.+C.&author=Laha+R.&author=Figueredo+L.+F.&author=Haddadin+S.&publication+year=2022\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Solution+to+Slosh-Free+Robot+Trajectory+Optimization&author=Muchacho+R.+I.+C.&author=Laha+R.&author=Figueredo+L.+F.&author=Haddadin+S.&publication+year=2022",openUrlParams:{genre:h,date:j,sid:d,title:b_},innerRefId:"r42",title:b_,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref43",displayNumber:"[43]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMuchacho\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. I. C.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBien\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELaha\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENaceri\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFigueredo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. 
F.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHaddadin\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EShared Autonomy Control for Slosh-Free Teleoperation\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E10676\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E10683\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Shared Autonomy Control for Slosh-Free Teleoperation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Shared+Autonomy+Control+for+Slosh-Free+Teleoperation&author=Muchacho+R.+I.+C.&author=Bien+S.&author=Laha+R.&author=Naceri+A.&author=Figueredo+L.+F.&author=Haddadin+S.&publication+year=2023\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Shared+Autonomy+Control+for+Slosh-Free+Teleoperation&author=Muchacho+R.+I.+C.&author=Bien+S.&author=Laha+R.&author=Naceri+A.&author=Figueredo+L.+F.&author=Haddadin+S.&publication+year=2023",openUrlParams:{genre:h,date:q,sid:d,title:b$},innerRefId:"r43",title:b$,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref44",displayNumber:"[44]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMeriçli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVeloso\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EAkın\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH. L.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPush-manipulation of complex passive mobile objects using experimentally acquired motion models\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAuton Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E38\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E317\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E329\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2015\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Push-manipulation of complex passive mobile objects using experimentally acquired motion models' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-014-9414-z\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Push-manipulation of complex passive mobile objects using experimentally acquired motion models' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Push-manipulation+of+complex+passive+mobile+objects+using+experimentally+acquired+motion+models&author=Meri%C3%A7li+T.&author=Veloso+M.&author=Ak%C4%B1n+H.+L.&publication+year=2015&journal=Auton+Robot&volume=38&doi=10.1007%2Fs10514-014-9414-z&pages=317-329\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Push-manipulation+of+complex+passive+mobile+objects+using+experimentally+acquired+motion+models&author=Meri%C3%A7li+T.&author=Veloso+M.&author=Ak%C4%B1n+H.+L.&publication+year=2015&journal=Auton+Robot&volume=38&doi=10.1007%2Fs10514-014-9414-z&pages=317-329",openUrlParams:{genre:e,atitle:ca,jtitle:x,title:x,volume:N,artnum:"65f99c3d442f7c0001a347ed",spage:aG,epage:aH,date:C,sid:d,aulast:a,aufirst:a,doi:cb,au:a},innerRefId:"r44",title:ca,doi:cb,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-014-9414-z",pubMedLink:a}]},{id:"ref45",displayNumber:"[45]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENovin\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYazdani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMerryweather\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHermans\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Robot Res\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E40\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4-5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E815\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E831\u003C\u002Fspan\u003E (\u003Cspan 
class=\"year\"\u003E2021\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics' href=https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364921992793\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A model predictive approach for online mobile manipulation of non-holonomic objects using learned dynamics' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+model+predictive+approach+for+online+mobile+manipulation+of+non-holonomic+objects+using+learned+dynamics&author=Novin+R.+S.&author=Yazdani+A.&author=Merryweather+A.&author=Hermans+T.&publication+year=2021&journal=Int+J+Robot+Res&volume=40&doi=10.1177%2F0278364921992793&pages=815-831\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+model+predictive+approach+for+online+mobile+manipulation+of+non-holonomic+objects+using+learned+dynamics&author=Novin+R.+S.&author=Yazdani+A.&author=Merryweather+A.&author=Hermans+T.&publication+year=2021&journal=Int+J+Robot+Res&volume=40&doi=10.1177%2F0278364921992793&pages=815-831",openUrlParams:{genre:e,atitle:cc,jtitle:w,title:w,volume:bc,artnum:"65f99c3d442f7c0001a347ee",spage:"815",epage:"831",date:t,sid:d,aulast:a,aufirst:a,doi:cd,au:a},innerRefId:"r45",title:cc,doi:cd,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364921992793",pubMedLink:a}]},{id:"ref46",displayNumber:"[46]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMahony\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHamel\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ERobust trajectory tracking for a scale model autonomous helicopter\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Robust Nonlin\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E14\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E12\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1035\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1059\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2004\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Robust trajectory tracking for a scale model autonomous helicopter' href=https:\u002F\u002Fdx.doi.org\u002F10.1002\u002Frnc.931\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Robust trajectory tracking for a scale model autonomous helicopter' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robust+trajectory+tracking+for+a+scale+model+autonomous+helicopter&author=Mahony+R.&author=Hamel+T.&publication+year=2004&journal=Int+J+Robust+Nonlin&volume=14&doi=10.1002%2Frnc.931&pages=1035-1059\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robust+trajectory+tracking+for+a+scale+model+autonomous+helicopter&author=Mahony+R.&author=Hamel+T.&publication+year=2004&journal=Int+J+Robust+Nonlin&volume=14&doi=10.1002%2Frnc.931&pages=1035-1059",openUrlParams:{genre:e,atitle:ce,jtitle:cf,title:cf,volume:cg,artnum:"65f99c3d442f7c0001a347ef",spage:"1035",epage:"1059",date:W,sid:d,aulast:a,aufirst:a,doi:ch,au:a},innerRefId:"r46",title:ce,doi:ch,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1002\u002Frnc.931",pubMedLink:a}]},{id:"ref47",displayNumber:"[47]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENonami\u003C\u002Fspan\u003E, 
\u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKendoul\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESuzuki\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EAutonomous Flying Robots\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EUnmanned Aerial Vehicles and Micro Aerial Vehicles\u003C\u002Fspan\u003E, (\u003Cspan class=\"publisher-name\"\u003ESpringer-Verlag\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003EBerlin Heidelberg, D\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2010\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Unmanned Aerial Vehicles and Micro Aerial Vehicles' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Unmanned+Aerial+Vehicles+and+Micro+Aerial+Vehicles&author=Nonami+K.&author=Kendoul+F.&author=Suzuki+S.&author=Wang+W.&publication+year=2010\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Unmanned+Aerial+Vehicles+and+Micro+Aerial+Vehicles&author=Nonami+K.&author=Kendoul+F.&author=Suzuki+S.&author=Wang+W.&publication+year=2010",openUrlParams:{genre:o,btitle:ac,title:ac,atitle:"Autonomous Flying Robots",aulast:a,aufirst:a,au:a,pub:"Springer-Verlag",date:ad,spage:f,epage:f,doi:a,sid:d},innerRefId:"r47",title:ac,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref48",displayNumber:"[48]",existInContent:b,content:"\u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESpica\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFranchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOriolo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBülthoff\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGiordano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAerial Grasping of a Moving Target with a Quadrotor UAV\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2012\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E4985\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4992\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Aerial Grasping of a Moving Target with a Quadrotor UAV' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Aerial+Grasping+of+a+Moving+Target+with+a+Quadrotor+UAV&author=Spica+R.&author=Franchi+A.&author=Oriolo+G.&author=B%C3%BClthoff+H.&author=Giordano+P.+R.&publication+year=2012\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Aerial+Grasping+of+a+Moving+Target+with+a+Quadrotor+UAV&author=Spica+R.&author=Franchi+A.&author=Oriolo+G.&author=B%C3%BClthoff+H.&author=Giordano+P.+R.&publication+year=2012",openUrlParams:{genre:h,date:"2012",sid:d,title:ci},innerRefId:"r48",title:ci,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref49",displayNumber:"[49]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAntonelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECataldi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGiordano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EChiaverini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFranchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EExperimental Validation of a New Adaptive Control Scheme for Quadrotors MAVs\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems 2013\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-loc\"\u003ETokyo, J\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2013\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E3496\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3501\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Experimental Validation of a New Adaptive Control Scheme for Quadrotors MAVs' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Experimental+Validation+of+a+New+Adaptive+Control+Scheme+for+Quadrotors+MAVs&author=Antonelli+G.&author=Cataldi+E.&author=Giordano+P.+R.&author=Chiaverini+S.&author=Franchi+A.&publication+year=2013\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Experimental+Validation+of+a+New+Adaptive+Control+Scheme+for+Quadrotors+MAVs&author=Antonelli+G.&author=Cataldi+E.&author=Giordano+P.+R.&author=Chiaverini+S.&author=Franchi+A.&publication+year=2013",openUrlParams:{genre:h,date:Q,sid:d,title:cj},innerRefId:"r49",title:cj,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref50",displayNumber:"[50]",existInContent:c,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDydek\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EZ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAnnaswamy\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELavretsky\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAdaptive control of quadrotor UAVs: A design trade study with flight evaluations\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Contr Syst Tech\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E21\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1400\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1406\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2013\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Adaptive control of quadrotor UAVs: A design trade study with flight evaluations' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCST.2012.2200104\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive control of quadrotor UAVs: A design trade study with flight evaluations' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+control+of+quadrotor+UAVs%3A+A+design+trade+study+with+flight+evaluations&author=Dydek+Z.&author=Annaswamy+A.&author=Lavretsky+E.&publication+year=2013&journal=IEEE+Trans+Contr+Syst+Tech&volume=21&doi=10.1109%2FTCST.2012.2200104&pages=1400-1406\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+control+of+quadrotor+UAVs%3A+A+design+trade+study+with+flight+evaluations&author=Dydek+Z.&author=Annaswamy+A.&author=Lavretsky+E.&publication+year=2013&journal=IEEE+Trans+Contr+Syst+Tech&volume=21&doi=10.1109%2FTCST.2012.2200104&pages=1400-1406",openUrlParams:{genre:e,atitle:ck,jtitle:P,title:P,volume:ae,artnum:"65f99c3d442f7c0001a347f3",spage:"1400",epage:"1406",date:Q,sid:d,aulast:a,aufirst:a,doi:cl,au:a},innerRefId:"r50",title:ck,doi:cl,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCST.2012.2200104",pubMedLink:a}]},{id:"ref51",displayNumber:"[51]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERoberts\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETayebi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAdaptive position tracking of VTOL UAVs\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E27\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E129\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E142\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2011\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Adaptive position tracking of VTOL UAVs' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2010.2092870\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive position tracking of VTOL UAVs' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+position+tracking+of+VTOL+UAVs&author=Roberts+A.&author=Tayebi+A.&publication+year=2011&journal=IEEE+Trans+Robot&volume=27&doi=10.1109%2FTRO.2010.2092870&pages=129-142\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+position+tracking+of+VTOL+UAVs&author=Roberts+A.&author=Tayebi+A.&publication+year=2011&journal=IEEE+Trans+Robot&volume=27&doi=10.1109%2FTRO.2010.2092870&pages=129-142",openUrlParams:{genre:e,atitle:cm,jtitle:m,title:m,volume:"27",artnum:"65f99c3d442f7c0001a347f4",spage:"129",epage:"142",date:cn,sid:d,aulast:a,aufirst:a,doi:co,au:a},innerRefId:"r51",title:cm,doi:co,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2010.2092870",pubMedLink:a}]},{id:"ref52",displayNumber:"[52]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYüksel\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESecchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBülthoff\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFranchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA Nonlinear Force Observer for Quadrotors and Application to Physical Interactive Tasks\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003E2014 IEEE\u002FASME International Conference on Advanced Intelligent Mechatronics\u003C\u002Fem\u003E, \u003Cspan 
class=\"publisher-loc\"\u003EBesançon, France\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E433\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E440\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Nonlinear Force Observer for Quadrotors and Application to Physical Interactive Tasks' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Nonlinear+Force+Observer+for+Quadrotors+and+Application+to+Physical+Interactive+Tasks&author=Y%C3%BCksel+B.&author=Secchi+C.&author=B%C3%BClthoff+H.&author=Franchi+A.&publication+year=2014\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Nonlinear+Force+Observer+for+Quadrotors+and+Application+to+Physical+Interactive+Tasks&author=Y%C3%BCksel+B.&author=Secchi+C.&author=B%C3%BClthoff+H.&author=Franchi+A.&publication+year=2014",openUrlParams:{genre:h,date:D,sid:d,title:cp},innerRefId:"r52",title:cp,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref53",displayNumber:"[53]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEgeland\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EO.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGodhavn\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.-M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassivity-based adaptive attitude control of a rigid spacecraft\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Automat Contr\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E39\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E842\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E846\u003C\u002Fspan\u003E (\u003Cspan 
class=\"year\"\u003E1994\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based adaptive attitude control of a rigid spacecraft' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002F9.286266\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based adaptive attitude control of a rigid spacecraft' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+adaptive+attitude+control+of+a+rigid+spacecraft&author=Egeland+O.&author=Godhavn+J.-M.&publication+year=1994&journal=IEEE+Trans+Automat+Contr&volume=39&doi=10.1109%2F9.286266&pages=842-846\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+adaptive+attitude+control+of+a+rigid+spacecraft&author=Egeland+O.&author=Godhavn+J.-M.&publication+year=1994&journal=IEEE+Trans+Automat+Contr&volume=39&doi=10.1109%2F9.286266&pages=842-846",openUrlParams:{genre:e,atitle:cq,jtitle:cr,title:cr,volume:"39",artnum:"65f99c3d442f7c0001a347f6",spage:"842",epage:"846",date:"1994",sid:d,aulast:a,aufirst:a,doi:cs,au:a},innerRefId:"r53",title:cq,doi:cs,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002F9.286266",pubMedLink:a}]},{id:"ref54",displayNumber:"[54]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHa\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZuo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EZ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EChoi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003ELee\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassivity-based adaptive backstepping control of quadrotor-type UAVs\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ERobot Auton Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E62\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E9\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1305\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1315\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based adaptive backstepping control of quadrotor-type UAVs' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2014.03.019\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based adaptive backstepping control of quadrotor-type UAVs' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+adaptive+backstepping+control+of+quadrotor-type+UAVs&author=Ha+C.&author=Zuo+Z.&author=Choi+F.&author=Lee+D.&publication+year=2014&journal=Robot+Auton+Syst&volume=62&doi=10.1016%2Fj.robot.2014.03.019&pages=1305-1315\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+adaptive+backstepping+control+of+quadrotor-type+UAVs&author=Ha+C.&author=Zuo+Z.&author=Choi+F.&author=Lee+D.&publication+year=2014&journal=Robot+Auton+Syst&volume=62&doi=10.1016%2Fj.robot.2014.03.019&pages=1305-1315",openUrlParams:{genre:e,atitle:ct,jtitle:r,title:r,volume:a_,artnum:"65f99c3d442f7c0001a347f7",spage:"1305",epage:"1315",date:D,sid:d,aulast:a,aufirst:a,doi:cu,au:a},innerRefId:"r54",title:ct,doi:cu,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2014.03.019",pubMedLink:a}]},{id:"ref55",displayNumber:"[55]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EValvanis\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"source\"\u003EAdvances in Unmanned Aerial Vehicles: State of the Art and the Road to Autonomy volume 33 of Intelligent Systems, Control and Automation: Science and Engineering\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003ENetherlands\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2007\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Advances in Unmanned Aerial Vehicles: State of the Art and the Road to Autonomy volume 33 of Intelligent Systems, Control and Automation: Science and Engineering' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Advances+in+Unmanned+Aerial+Vehicles%3A+State+of+the+Art+and+the+Road+to+Autonomy+volume+33+of+Intelligent+Systems%2C+Control+and+Automation%3A+Science+and+Engineering&author=Valvanis+K.&publication+year=2007\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Advances+in+Unmanned+Aerial+Vehicles%3A+State+of+the+Art+and+the+Road+to+Autonomy+volume+33+of+Intelligent+Systems%2C+Control+and+Automation%3A+Science+and+Engineering&author=Valvanis+K.&publication+year=2007",openUrlParams:{genre:af,btitle:ag,title:ag,aulast:a,aufirst:a,au:a,pub:B,date:_,tpages:f,doi:a,sid:d},innerRefId:"r55",title:ag,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref56",displayNumber:"[56]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EValvanis\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVachtsevanos\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"source\"\u003EHandbook of Unmanned Aerial Vehicles\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003ENetherlands\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2015\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Handbook of Unmanned Aerial Vehicles' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-90-481-9707-1\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Handbook of Unmanned Aerial Vehicles' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Handbook+of+Unmanned+Aerial+Vehicles&author=Valvanis+K.&author=Vachtsevanos+G.&publication+year=2015\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Handbook+of+Unmanned+Aerial+Vehicles&author=Valvanis+K.&author=Vachtsevanos+G.&publication+year=2015",openUrlParams:{genre:af,btitle:ah,title:ah,aulast:a,aufirst:a,au:a,pub:B,date:C,tpages:f,doi:cv,sid:d},innerRefId:"r56",title:ah,doi:cv,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-90-481-9707-1",pubMedLink:a}]},{id:"ref57",displayNumber:"[57]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOller\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETognon\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESuarez\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELee\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFranchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPast, present, and future of aerial robotic manipulators\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E38\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E626\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E645\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef 
link for Past, present, and future of aerial robotic manipulators' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2021.3084395\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Past, present, and future of aerial robotic manipulators' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Past%2C+present%2C+and+future+of+aerial+robotic+manipulators&author=Oller+A.&author=Tognon+M.&author=Suarez+A.&author=Lee+D.&author=Franchi+A.&publication+year=2022&journal=IEEE+Trans+Robot&volume=38&doi=10.1109%2FTRO.2021.3084395&pages=626-645\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Past%2C+present%2C+and+future+of+aerial+robotic+manipulators&author=Oller+A.&author=Tognon+M.&author=Suarez+A.&author=Lee+D.&author=Franchi+A.&publication+year=2022&journal=IEEE+Trans+Robot&volume=38&doi=10.1109%2FTRO.2021.3084395&pages=626-645",openUrlParams:{genre:e,atitle:cw,jtitle:m,title:m,volume:N,artnum:"65f99c3d442f7c0001a347fa",spage:"626",epage:"645",date:j,sid:d,aulast:a,aufirst:a,doi:cx,au:a},innerRefId:"r57",title:cw,doi:cx,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2021.3084395",pubMedLink:a}]},{id:"ref58",displayNumber:"[58]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOllero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAerial manipulation: A literature 
review\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1957\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1964\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Aerial manipulation: A literature review' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2808541\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Aerial manipulation: A literature review' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Aerial+manipulation%3A+A+literature+review&author=Ruggiero+F.&author=Lippiello+V.&author=Ollero+A.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2808541&pages=1957-1964\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Aerial+manipulation%3A+A+literature+review&author=Ruggiero+F.&author=Lippiello+V.&author=Ollero+A.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2808541&pages=1957-1964",openUrlParams:{genre:e,atitle:cy,jtitle:i,title:i,volume:z,artnum:"65f99c3d442f7c0001a347fb",spage:"1957",epage:"1964",date:n,sid:d,aulast:a,aufirst:a,doi:cz,au:a},innerRefId:"r58",title:cy,doi:cz,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2808541",pubMedLink:a}]},{id:"ref59",displayNumber:"[59]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESadeghian\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ERobot Auton Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E72\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E139\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E151\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2015\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2015.05.006\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+control+of+vtol+uavs+with+a+momentum-based+estimator+of+external+wrench+and+unmodeled+dynamics&author=Ruggiero+F.&author=Cacace+J.&author=Sadeghian+H.&author=Lippiello+V.&publication+year=2015&journal=Robot+Auton+Syst&volume=72&doi=10.1016%2Fj.robot.2015.05.006&pages=139-151\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passivity-based+control+of+vtol+uavs+with+a+momentum-based+estimator+of+external+wrench+and+unmodeled+dynamics&author=Ruggiero+F.&author=Cacace+J.&author=Sadeghian+H.&author=Lippiello+V.&publication+year=2015&journal=Robot+Auton+Syst&volume=72&doi=10.1016%2Fj.robot.2015.05.006&pages=139-151",openUrlParams:{genre:e,atitle:cA,jtitle:r,title:r,volume:"72",artnum:"65f99c3d442f7c0001a347fc",spage:"139",epage:be,date:C,sid:d,aulast:a,aufirst:a,doi:cB,au:a},innerRefId:"r59",title:cA,doi:cB,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2015.05.006",pubMedLink:a}]},{id:"ref60",displayNumber:"[60]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESotos\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. M. O.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EActive disturbance rejection control for the robust flight of a passively tilted hexarotor\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EDrones\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E6\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E9\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E250\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003Ea).\u003Ca 
class='ref-link' target='_blank' aria-label='Google Scholar link for Active disturbance rejection control for the robust flight of a passively tilted hexarotor' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Active+disturbance+rejection+control+for+the+robust+flight+of+a+passively+tilted+hexarotor&author=Sotos+S.+M.+O.&author=Cacace+J.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Drones&volume=6\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Active+disturbance+rejection+control+for+the+robust+flight+of+a+passively+tilted+hexarotor&author=Sotos+S.+M.+O.&author=Cacace+J.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Drones&volume=6",openUrlParams:{genre:e,atitle:cC,jtitle:S,title:S,volume:I,artnum:"65f99c3d442f7c0001a347fd",spage:"250",epage:f,date:j,sid:d,aulast:a,aufirst:a,doi:a,au:a},innerRefId:"r60",title:cC,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref61",displayNumber:"[61]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESotos\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. M. 
O.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EGlobally attractive hyperbolic control for the robust flight of an actively tilting quadrotor\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EDrones\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E6\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E12\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E373\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003Eb).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Globally attractive hyperbolic control for the robust flight of an actively tilting quadrotor' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Globally+attractive+hyperbolic+control+for+the+robust+flight+of+an+actively+tilting+quadrotor&author=Sotos+S.+M.+O.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Drones&volume=6\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Globally+attractive+hyperbolic+control+for+the+robust+flight+of+an+actively+tilting+quadrotor&author=Sotos+S.+M.+O.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2022&journal=Drones&volume=6",openUrlParams:{genre:e,atitle:cD,jtitle:S,title:S,volume:I,artnum:"65f99c3d442f7c0001a347fe",spage:"373",epage:f,date:j,sid:d,aulast:a,aufirst:a,doi:a,au:a},innerRefId:"r61",title:cD,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref62",displayNumber:"[62]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESerra\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EEmergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003Ea) pp. \u003Cspan class=\"fpage\"\u003E4782\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4788\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS.2014.6943242\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Emergency+Landing+for+a+Quadrotor+in+Case+of+a+Propeller+Failure%3A+A+Backstepping+Approach&author=Lippiello+V.&author=Ruggiero+F.&author=Serra+D.&publication+year=2014\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Emergency+Landing+for+a+Quadrotor+in+Case+of+a+Propeller+Failure%3A+A+Backstepping+Approach&author=Lippiello+V.&author=Ruggiero+F.&author=Serra+D.&publication+year=2014",openUrlParams:{genre:h,date:D,sid:d,title:cE},innerRefId:"r62",title:cE,doi:"10.1109\u002FIROS.2014.6943242",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS.2014.6943242",pubMedLink:a}]},{id:"ref63",displayNumber:"[63]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESerra\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EEmergency Landing for a Quadrotor in Case of a Propeller Failure: A PID Based Approach\u003C\u002Fspan\u003E. 
In: \u003Cem class=\"italic\"\u003E12th IEEE International Symposium on Safety, Security, and Rescue Robotics\u003C\u002Fem\u003E, \u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003Eb.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Emergency Landing for a Quadrotor in Case of a Propeller Failure: A PID Based Approach' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Emergency+Landing+for+a+Quadrotor+in+Case+of+a+Propeller+Failure%3A+A+PID+Based+Approach&author=Lippiello+V.&author=Ruggiero+F.&author=Serra+D.&publication+year=2014\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Emergency+Landing+for+a+Quadrotor+in+Case+of+a+Propeller+Failure%3A+A+PID+Based+Approach&author=Lippiello+V.&author=Ruggiero+F.&author=Serra+D.&publication+year=2014",openUrlParams:{genre:h,date:D,sid:d,title:cF},innerRefId:"r63",title:cF,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref64",displayNumber:"[64]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ED’Ago\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESuarez\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGañán\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF. J.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBuonocore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDi Castro\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOllero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EModelling and identification methods for simulation of cable-suspended dual-arm robotic systems\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ERobot Auton Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E175\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E104643\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2024\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2024.104643\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Modelling+and+identification+methods+for+simulation+of+cable-suspended+dual-arm+robotic+systems&author=D%E2%80%99Ago+G.&author=Selvaggio+M.&author=Suarez+A.&author=Ga%C3%B1%C3%A1n+F.+J.&author=Buonocore+L.+R.&author=Di+Castro+M.&author=Lippiello+V.&author=Ollero+A.&author=Ruggiero+F.&publication+year=2024&journal=Robot+Auton+Syst&volume=175&doi=10.1016%2Fj.robot.2024.104643\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Modelling+and+identification+methods+for+simulation+of+cable-suspended+dual-arm+robotic+systems&author=D%E2%80%99Ago+G.&author=Selvaggio+M.&author=Suarez+A.&author=Ga%C3%B1%C3%A1n+F.+J.&author=Buonocore+L.+R.&author=Di+Castro+M.&author=Lippiello+V.&author=Ollero+A.&author=Ruggiero+F.&publication+year=2024&journal=Robot+Auton+Syst&volume=175&doi=10.1016%2Fj.robot.2024.104643",openUrlParams:{genre:e,atitle:cG,jtitle:r,title:r,volume:"175",artnum:"65f99c3d442f7c0001a34801",spage:"104643",epage:f,date:"2024",sid:d,aulast:a,aufirst:a,doi:cH,au:a},innerRefId:"r64",title:cG,doi:cH,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2024.104643",pubMedLink:a}]},{id:"ref65",displayNumber:"[65]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETrujillo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAscorbe\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EViguria\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPeréz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOllero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2015\u003C\u002Fspan\u003Eb) pp. 
\u003Cspan class=\"fpage\"\u003E4014\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4020\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA.2015.7139760\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Multilayer+Control+for+Multirotor+uavs+Equipped+with+a+Servo+Robot+Arm&author=Ruggiero+F.&author=Trujillo+M.&author=Cano+R.&author=Ascorbe+H.&author=Viguria+A.&author=Per%C3%A9z+C.&author=Lippiello+V.&author=Ollero+A.&author=Siciliano+B.&publication+year=2015\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Multilayer+Control+for+Multirotor+uavs+Equipped+with+a+Servo+Robot+Arm&author=Ruggiero+F.&author=Trujillo+M.&author=Cano+R.&author=Ascorbe+H.&author=Viguria+A.&author=Per%C3%A9z+C.&author=Lippiello+V.&author=Ollero+A.&author=Siciliano+B.&publication+year=2015",openUrlParams:{genre:h,date:C,sid:d,title:cI},innerRefId:"r65",title:cI,doi:"10.1109\u002FICRA.2015.7139760",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA.2015.7139760",pubMedLink:a}]},{id:"ref66",displayNumber:"[66]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EA Novel Hybrid Aerial-Ground Manipulator for Pipeline Inspection Tasks\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EAerial Robotic Systems Physically Interacting with the Environment (AIRPHARO)\u003C\u002Fspan\u003E, Biograd na Moru, Croatia, (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E1\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E6\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO)' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Aerial+Robotic+Systems+Physically+Interacting+with+the+Environment+(AIRPHARO)&author=Cacace+J.&author=Fontanelli+G.+A.&author=Lippiello+V.&publication+year=2021&pages=1-6\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Aerial+Robotic+Systems+Physically+Interacting+with+the+Environment+(AIRPHARO)&author=Cacace+J.&author=Fontanelli+G.+A.&author=Lippiello+V.&publication+year=2021&pages=1-6",openUrlParams:{genre:o,btitle:ai,title:ai,atitle:"A Novel Hybrid Aerial-Ground Manipulator for Pipeline Inspection Tasks",aulast:a,aufirst:a,au:a,pub:f,date:t,spage:E,epage:I,doi:a,sid:d},innerRefId:"r66",title:ai,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref67",displayNumber:"[67]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003ESilva\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. D.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA novel articulated rover for industrial pipes inspection tasks\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FASME International Conference on Advanced Intelligent Mechatronics (AIM)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003Eb) pp. \u003Cspan class=\"fpage\"\u003E1027\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1032\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A novel articulated rover for industrial pipes inspection tasks' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FAIM46487.2021.9517691\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A novel articulated rover for industrial pipes inspection tasks' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+novel+articulated+rover+for+industrial+pipes+inspection+tasks&author=Cacace+J.&author=Silva+M.+D.&author=Fontanelli+G.+A.&author=Lippiello+V.&publication+year=2021\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+novel+articulated+rover+for+industrial+pipes+inspection+tasks&author=Cacace+J.&author=Silva+M.+D.&author=Fontanelli+G.+A.&author=Lippiello+V.&publication+year=2021",openUrlParams:{genre:h,date:t,sid:d,title:cJ},innerRefId:"r67",title:cJ,doi:"10.1109\u002FAIM46487.2021.9517691",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FAIM46487.2021.9517691",pubMedLink:a}]},{id:"ref68",displayNumber:"[68]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECuniato\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EA Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003E20th International Conference on Advanced Robotics\u003C\u002Fem\u003E, ( \u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E830\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E835\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICAR53236.2021.9659398\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Hardware-in-the-+Loop+Simulator+for+Physical+Human-Aerial+Manipulator+Cooperation&author=Cuniato+E.&author=Cacace+J.&author=Selvaggio+M.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2021\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Hardware-in-the-+Loop+Simulator+for+Physical+Human-Aerial+Manipulator+Cooperation&author=Cuniato+E.&author=Cacace+J.&author=Selvaggio+M.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2021",openUrlParams:{genre:h,date:t,sid:d,title:cK},innerRefId:"r68",title:cK,doi:"10.1109\u002FICAR53236.2021.9659398",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICAR53236.2021.9659398",pubMedLink:a}]},{id:"ref69",displayNumber:"[69]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EImage-based visual-impedance control of a dual-arm aerial manipulator\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1856\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1863\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Image-based visual-impedance control of a dual-arm aerial manipulator' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2806091\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Image-based visual-impedance control of a dual-arm aerial manipulator' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Image-based+visual-impedance+control+of+a+dual-arm+aerial+manipulator&author=Lippiello+V.&author=Fontanelli+G.+A.&author=Ruggiero+F.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2806091&pages=1856-1863\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Image-based+visual-impedance+control+of+a+dual-arm+aerial+manipulator&author=Lippiello+V.&author=Fontanelli+G.+A.&author=Ruggiero+F.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2806091&pages=1856-1863",openUrlParams:{genre:e,atitle:cL,jtitle:i,title:i,volume:z,artnum:"65f99c3d442f7c0001a34806",spage:"1856",epage:"1863",date:n,sid:d,aulast:a,aufirst:a,doi:cM,au:a},innerRefId:"r69",title:cL,doi:cM,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2806091",pubMedLink:a}]},{id:"ref70",displayNumber:"[70]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ED’Angelo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPagano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERuggiero\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EDevelopment of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EInternational Conference on Unmanned Aircraft System\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines' 
href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICUAS57906.2023.10156403\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Development+of+a+Control+Framework+to+Autonomously+Install+Clip+Bird+Diverters+on+High-Voltage+Lines&author=D%E2%80%99Angelo+S.&author=Pagano+F.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2023\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Development+of+a+Control+Framework+to+Autonomously+Install+Clip+Bird+Diverters+on+High-Voltage+Lines&author=D%E2%80%99Angelo+S.&author=Pagano+F.&author=Ruggiero+F.&author=Lippiello+V.&publication+year=2023",openUrlParams:{genre:h,date:q,sid:d,title:cN},innerRefId:"r70",title:cN,doi:"10.1109\u002FICUAS57906.2023.10156403",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICUAS57906.2023.10156403",pubMedLink:a}]},{id:"ref71",displayNumber:"[71]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECognetti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENikolaidis\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIvaldi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAutonomy in physical human-robot interaction: A brief survey\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E6\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E7989\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E7996\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Autonomy in physical human-robot interaction: A brief survey' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2021.3100603\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Autonomy in physical human-robot interaction: A brief survey' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Autonomy+in+physical+human-robot+interaction%3A+A+brief+survey&author=Selvaggio+M.&author=Cognetti+M.&author=Nikolaidis+S.&author=Ivaldi+S.&author=Siciliano+B.&publication+year=2021&journal=IEEE+Robot+Autom+Lett&volume=6&doi=10.1109%2FLRA.2021.3100603&pages=7989-7996\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Autonomy+in+physical+human-robot+interaction%3A+A+brief+survey&author=Selvaggio+M.&author=Cognetti+M.&author=Nikolaidis+S.&author=Ivaldi+S.&author=Siciliano+B.&publication+year=2021&journal=IEEE+Robot+Autom+Lett&volume=6&doi=10.1109%2FLRA.2021.3100603&pages=7989-7996",openUrlParams:{genre:e,atitle:cO,jtitle:s,title:s,volume:I,artnum:"65f99c3d442f7c0001a34808",spage:"7989",epage:"7996",date:t,sid:d,aulast:a,aufirst:a,doi:cP,au:a},innerRefId:"r71",title:cO,doi:cP,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2021.3100603",pubMedLink:a}]},{id:"ref72",displayNumber:"[72]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJohannsmeier\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHaddadin\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E2\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E41\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E48\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2016.2535907\u003ECrossRef\u003C\u002Fa\u003E\u003Ca 
class='ref-link' target='_blank' aria-label='Google Scholar link for A hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+hierarchical+human-robot+interaction-planning+framework+for+task+allocation+in+collaborative+industrial+assembly+processes&author=Johannsmeier+L.&author=Haddadin+S.&publication+year=2017&journal=IEEE+Robot+Autom+Lett&volume=2&doi=10.1109%2FLRA.2016.2535907&pages=41-48\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+hierarchical+human-robot+interaction-planning+framework+for+task+allocation+in+collaborative+industrial+assembly+processes&author=Johannsmeier+L.&author=Haddadin+S.&publication+year=2017&journal=IEEE+Robot+Autom+Lett&volume=2&doi=10.1109%2FLRA.2016.2535907&pages=41-48",openUrlParams:{genre:e,atitle:cQ,jtitle:s,title:s,volume:aj,artnum:"65f99c3d442f7c0001a34809",spage:"41",epage:"48",date:A,sid:d,aulast:a,aufirst:a,doi:cR,au:a},innerRefId:"r72",title:cQ,doi:cR,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2016.2535907",pubMedLink:a}]},{id:"ref73",displayNumber:"[73]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGrieco\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ECombining human guidance and structured task execution during physical human–robot collaboration\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EJ Intell Manuf\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E34\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E7\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E3053\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3067\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Combining human guidance and structured task execution during physical human–robot collaboration' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10845-022-01989-y\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Combining human guidance and structured task execution during physical human–robot collaboration' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Combining+human+guidance+and+structured+task+execution+during+physical+human%E2%80%93robot+collaboration&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Grieco+R.&publication+year=2022&journal=J+Intell+Manuf&volume=34&doi=10.1007%2Fs10845-022-01989-y&pages=3053-3067\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Combining+human+guidance+and+structured+task+execution+during+physical+human%E2%80%93robot+collaboration&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Grieco+R.&publication+year=2022&journal=J+Intell+Manuf&volume=34&doi=10.1007%2Fs10845-022-01989-y&pages=3053-3067",openUrlParams:{genre:e,atitle:cS,jtitle:cT,title:cT,volume:aR,artnum:"65f99c3d442f7c0001a3480a",spage:"3053",epage:"3067",date:j,sid:d,aulast:a,aufirst:a,doi:cU,au:a},innerRefId:"r73",title:cS,doi:cU,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10845-022-01989-y",pubMedLink:a}]},{id:"ref74",displayNumber:"[74]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EInteractive plan execution during human-robot cooperative manipulation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIFAC-PapersOnLine\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E51\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E22\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E500\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E505\u003C\u002Fspan\u003E (\u003Cspan 
class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Interactive plan execution during human-robot cooperative manipulation' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.ifacol.2018.11.584\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Interactive plan execution during human-robot cooperative manipulation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Interactive+plan+execution+during+human-robot+cooperative+manipulation&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Lippiello+V.&publication+year=2018&journal=IFAC-PapersOnLine&volume=51&doi=10.1016%2Fj.ifacol.2018.11.584&pages=500-505\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Interactive+plan+execution+during+human-robot+cooperative+manipulation&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Lippiello+V.&publication+year=2018&journal=IFAC-PapersOnLine&volume=51&doi=10.1016%2Fj.ifacol.2018.11.584&pages=500-505",openUrlParams:{genre:e,atitle:cV,jtitle:H,title:H,volume:aW,artnum:"65f99c3d442f7c0001a3480b",spage:"500",epage:"505",date:n,sid:d,aulast:a,aufirst:a,doi:cW,au:a},innerRefId:"r74",title:cV,doi:cW,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.ifacol.2018.11.584",pubMedLink:a}]},{id:"ref75",displayNumber:"[75]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGoodrich\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESchultz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. 
C.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EHuman-robot interaction: A survey\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EFound Trends® Human-Comp Inter\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E1\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E203\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E275\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2008\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Human-robot interaction: A survey' href=https:\u002F\u002Fdx.doi.org\u002F10.1561\u002F1100000005\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Human-robot interaction: A survey' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-robot+interaction%3A+A+survey&author=Goodrich+M.+A.&author=Schultz+A.+C.&publication+year=2008&journal=Found+Trends%C2%AE+Human-Comp+Inter&volume=1&doi=10.1561%2F1100000005&pages=203-275\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-robot+interaction%3A+A+survey&author=Goodrich+M.+A.&author=Schultz+A.+C.&publication+year=2008&journal=Found+Trends%C2%AE+Human-Comp+Inter&volume=1&doi=10.1561%2F1100000005&pages=203-275",openUrlParams:{genre:e,atitle:cX,jtitle:cY,title:cY,volume:E,artnum:"65f99c3d442f7c0001a3480c",spage:"203",epage:"275",date:G,sid:d,aulast:a,aufirst:a,doi:cZ,au:a},innerRefId:"r75",title:cX,doi:cZ,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1561\u002F1100000005",pubMedLink:a}]},{id:"ref76",displayNumber:"[76]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.-Z.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECambias\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECleary\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDaimler\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDrake\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDupont\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. E.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHata\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EN.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKazanzides\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMartel\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPatel\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. V.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESantos\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV. 
J.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETaylor\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. H.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EMedical robotics 2014;regulatory, ethical, and legal considerations for increasing levels of autonomy\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ESci Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E2\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003Eeaam8638\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Medical robotics 2014;regulatory, ethical, and legal considerations for increasing levels of autonomy' href=https:\u002F\u002Fdx.doi.org\u002F10.1126\u002Fscirobotics.aam8638\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Medical robotics 2014;regulatory, ethical, and legal considerations for increasing levels of autonomy' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Medical+robotics+2014%3Bregulatory%2C+ethical%2C+and+legal+considerations+for+increasing+levels+of+autonomy&author=Yang+G.-Z.&author=Cambias+J.&author=Cleary+K.&author=Daimler+E.&author=Drake+J.&author=Dupont+P.+E.&author=Hata+N.&author=Kazanzides+P.&author=Martel+S.&author=Patel+R.+V.&author=Santos+V.+J.&author=Taylor+R.+H.&publication+year=2017&journal=Sci+Robot&volume=2&doi=10.1126%2Fscirobotics.aam8638\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Medical+robotics+2014%3Bregulatory%2C+ethical%2C+and+legal+considerations+for+increasing+levels+of+autonomy&author=Yang+G.-Z.&author=Cambias+J.&author=Cleary+K.&author=Daimler+E.&author=Drake+J.&author=Dupont+P.+E.&author=Hata+N.&author=Kazanzides+P.&author=Martel+S.&author=Patel+R.+V.&author=Santos+V.+J.&author=Taylor+R.+H.&publication+year=2017&journal=Sci+Robot&volume=2&doi=10.1126%2Fscirobotics.aam8638",openUrlParams:{genre:e,atitle:c_,jtitle:c$,title:c$,volume:aj,artnum:"65f99c3d442f7c0001a3480d",spage:"eaam8638",epage:f,date:A,sid:d,aulast:a,aufirst:a,doi:da,au:a},innerRefId:"r76",title:c_,doi:da,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1126\u002Fscirobotics.aam8638",pubMedLink:a}]},{id:"ref77",displayNumber:"[77]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKanda\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIshiguro\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. 
\u003Cspan class=\"source\"\u003EHuman-Robot Interaction in Social Robotics\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ECRC Press Boca Raton\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003EFlorida\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Human-Robot Interaction in Social Robotics' href=https:\u002F\u002Fdx.doi.org\u002F10.1201\u002Fb13004\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Human-Robot Interaction in Social Robotics' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-Robot+Interaction+in+Social+Robotics&author=Kanda+T.&author=Ishiguro+H.&publication+year=2017\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-Robot+Interaction+in+Social+Robotics&author=Kanda+T.&author=Ishiguro+H.&publication+year=2017",openUrlParams:{genre:af,btitle:ak,title:ak,aulast:a,aufirst:a,au:a,pub:"CRC Press Boca Raton",date:A,tpages:f,doi:db,sid:d},innerRefId:"r77",title:ak,doi:db,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1201\u002Fb13004",pubMedLink:a}]},{id:"ref78",displayNumber:"[78]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESchilling\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBurgard\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMuelling\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EWrede\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERitter\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EShared autonomy–learning of joint action and human-robot collaboration\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EFront Neurorobotics\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E13\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E16\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Shared autonomy–learning of joint action and human-robot collaboration' href=https:\u002F\u002Fdx.doi.org\u002F10.3389\u002Ffnbot.2019.00016\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Shared autonomy–learning of joint action and human-robot collaboration' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Shared+autonomy%E2%80%93learning+of+joint+action+and+human-robot+collaboration&author=Schilling+M.&author=Burgard+W.&author=Muelling+K.&author=Wrede+B.&author=Ritter+H.&publication+year=2019&journal=Front+Neurorobotics&volume=13&doi=10.3389%2Ffnbot.2019.00016\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for Shared autonomy–learning of joint action and human-robot collaboration' 
href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F31156417\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Shared+autonomy%E2%80%93learning+of+joint+action+and+human-robot+collaboration&author=Schilling+M.&author=Burgard+W.&author=Muelling+K.&author=Wrede+B.&author=Ritter+H.&publication+year=2019&journal=Front+Neurorobotics&volume=13&doi=10.3389%2Ffnbot.2019.00016",openUrlParams:{genre:e,atitle:dc,jtitle:dd,title:dd,volume:"13",artnum:"65f99c3d442f7c0001a3480f",spage:de,epage:f,date:k,sid:d,aulast:a,aufirst:a,doi:df,au:a},innerRefId:"r78",title:dc,pubMedId:"31156417",doi:df,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.3389\u002Ffnbot.2019.00016",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F31156417"}]},{id:"ref79",displayNumber:"[79]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBruemmer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED. J.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDudenhoeffer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED. D.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMarble\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ. L.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EDynamic-Autonomy for Urban Search and Rescue\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EAAAI Mobile Robot Competition\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2002\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E33\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E37\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for AAAI Mobile Robot Competition' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=AAAI+Mobile+Robot+Competition&author=Bruemmer+D.+J.&author=Dudenhoeffer+D.+D.&author=Marble+J.+L.&publication+year=2002&pages=33-37\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=AAAI+Mobile+Robot+Competition&author=Bruemmer+D.+J.&author=Dudenhoeffer+D.+D.&author=Marble+J.+L.&publication+year=2002&pages=33-37",openUrlParams:{genre:o,btitle:al,title:al,atitle:"Dynamic-Autonomy for Urban Search and Rescue",aulast:a,aufirst:a,au:a,pub:f,date:Y,spage:dg,epage:dh,doi:a,sid:d},innerRefId:"r79",title:al,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref80",displayNumber:"[80]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDias\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. 
B.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKannan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBrowning\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJones\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArgall\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDias\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. F.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZinck\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVeloso\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EStentz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ESliding Autonomy for Peer-to-Peer Human-Robot Teams\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EInternational Conference on Intelligent Autonomous Systems\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2008\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E332\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E341\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Sliding Autonomy for Peer-to-Peer Human-Robot Teams' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Sliding+Autonomy+for+Peer-to-Peer+Human-Robot+Teams&author=Dias+M.+B.&author=Kannan+B.&author=Browning+B.&author=Jones+E.&author=Argall+B.&author=Dias+M.+F.&author=Zinck+M.&author=Veloso+M.&author=Stentz+A.&publication+year=2008\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Sliding+Autonomy+for+Peer-to-Peer+Human-Robot+Teams&author=Dias+M.+B.&author=Kannan+B.&author=Browning+B.&author=Jones+E.&author=Argall+B.&author=Dias+M.+F.&author=Zinck+M.&author=Veloso+M.&author=Stentz+A.&publication+year=2008",openUrlParams:{genre:h,date:G,sid:d,title:di},innerRefId:"r80",title:di,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref81",displayNumber:"[81]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKortenkamp\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKeirn-Schreckenghost\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBonasso\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. P.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAdjustable Control Autonomy for Manned Space Flight\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE Aerospace Conference (AeroConf)\u003C\u002Fem\u003E, \u003Cspan class=\"volume\"\u003E7\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2000\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E629\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E640\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Adjustable Control Autonomy for Manned Space Flight' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adjustable+Control+Autonomy+for+Manned+Space+Flight&author=Kortenkamp+D.&author=Keirn-Schreckenghost+D.&author=Bonasso+R.+P.&publication+year=2000\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adjustable+Control+Autonomy+for+Manned+Space+Flight&author=Kortenkamp+D.&author=Keirn-Schreckenghost+D.&author=Bonasso+R.+P.&publication+year=2000",openUrlParams:{genre:h,date:dj,sid:d,title:dk},innerRefId:"r81",title:dk,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref82",displayNumber:"[82]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAnderson\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPeters\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIagnemma\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOverholt\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ESemi-Autonomous Stability Control and Hazard Avoidance for Manned and Unmanned Ground Vehicles\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EThe 27th Army Science Conference\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2010\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E1\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E8\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Semi-Autonomous Stability Control and Hazard Avoidance for Manned and Unmanned Ground Vehicles' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Semi-Autonomous+Stability+Control+and+Hazard+Avoidance+for+Manned+and+Unmanned+Ground+Vehicles&author=Anderson+S.&author=Peters+S.&author=Iagnemma+K.&author=Overholt+J.&publication+year=2010\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Semi-Autonomous+Stability+Control+and+Hazard+Avoidance+for+Manned+and+Unmanned+Ground+Vehicles&author=Anderson+S.&author=Peters+S.&author=Iagnemma+K.&author=Overholt+J.&publication+year=2010",openUrlParams:{genre:h,date:ad,sid:d,title:dl},innerRefId:"r82",title:dl,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref83",displayNumber:"[83]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDesai\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYanco\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EBlending Human and Robot Inputs for Sliding Scale Autonomy\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EIEEE International Workshop on Robot and Human Interactive Communication (ROMAN)\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2005\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E537\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E542\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for IEEE International Workshop on Robot and Human Interactive Communication (ROMAN)' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=IEEE+International+Workshop+on+Robot+and+Human+Interactive+Communication+(ROMAN)&author=Desai+M.&author=Yanco+H.+A.&publication+year=2005&pages=537-542\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=IEEE+International+Workshop+on+Robot+and+Human+Interactive+Communication+(ROMAN)&author=Desai+M.&author=Yanco+H.+A.&publication+year=2005&pages=537-542",openUrlParams:{genre:o,btitle:am,title:am,atitle:"Blending Human and Robot Inputs for Sliding Scale Autonomy",aulast:a,aufirst:a,au:a,pub:f,date:an,spage:"537",epage:"542",doi:a,sid:d},innerRefId:"r83",title:am,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref84",displayNumber:"[84]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPitzer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EStyer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBersch\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDuHadway\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBecker\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ETowards Perceptual Shared Autonomy for Robotic Mobile Manipulation\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation\u003C\u002Fem\u003E, ( \u003Cspan class=\"year\"\u003E2011\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E6245\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E6251\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA.2011.5980259\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Towards+Perceptual+Shared+Autonomy+for+Robotic+Mobile+Manipulation&author=Pitzer+B.&author=Styer+M.&author=Bersch+C.&author=DuHadway+C.&author=Becker+J.&publication+year=2011\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Towards+Perceptual+Shared+Autonomy+for+Robotic+Mobile+Manipulation&author=Pitzer+B.&author=Styer+M.&author=Bersch+C.&author=DuHadway+C.&author=Becker+J.&publication+year=2011",openUrlParams:{genre:h,date:cn,sid:d,title:dm},innerRefId:"r84",title:dm,doi:"10.1109\u002FICRA.2011.5980259",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA.2011.5980259",pubMedLink:a}]},{id:"ref85",displayNumber:"[85]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESellner\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESimmons\u003C\u002Fspan\u003E, 
\u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESingh\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EUser Modelling for Principled Sliding Autonomy in Human-Robot Teams\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EMulti-Robot Systems. From Swarms to Intelligent Automata\u003C\u002Fspan\u003E. vol. \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2005\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E197\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E208\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Multi-Robot Systems. From Swarms to Intelligent Automata' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Multi-Robot+Systems.+From+Swarms+to+Intelligent+Automata&author=Sellner+B.&author=Simmons+R.&author=Singh+S.&publication+year=2005&pages=197-208\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Multi-Robot+Systems.+From+Swarms+to+Intelligent+Automata&author=Sellner+B.&author=Simmons+R.&author=Singh+S.&publication+year=2005&pages=197-208",openUrlParams:{genre:o,btitle:ao,title:ao,atitle:"User Modelling for Principled Sliding Autonomy in Human-Robot Teams",aulast:a,aufirst:a,au:a,pub:B,date:an,spage:"197",epage:"208",doi:a,sid:d},innerRefId:"r85",title:ao,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref86",displayNumber:"[86]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDragan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. 
D.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESrinivasa\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA Policy-Blending Formalism for Shared Control\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Robot Res\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E32\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E7\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E790\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E805\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2013\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A Policy-Blending Formalism for Shared Control' href=https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364913490324\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Policy-Blending Formalism for Shared Control' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Policy-Blending+Formalism+for+Shared+Control&author=Dragan+A.+D.&author=Srinivasa+S.+S.&publication+year=2013&journal=Int+J+Robot+Res&volume=32&doi=10.1177%2F0278364913490324&pages=790-805\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Policy-Blending+Formalism+for+Shared+Control&author=Dragan+A.+D.&author=Srinivasa+S.+S.&publication+year=2013&journal=Int+J+Robot+Res&volume=32&doi=10.1177%2F0278364913490324&pages=790-805",openUrlParams:{genre:e,atitle:dn,jtitle:w,title:w,volume:"32",artnum:"65f99c3d442f7c0001a34817",spage:"790",epage:"805",date:Q,sid:d,aulast:a,aufirst:a,doi:do0,au:a},innerRefId:"r86",title:dn,doi:do0,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364913490324",pubMedLink:a}]},{id:"ref87",displayNumber:"[87]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJavdani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESrinivasa\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBagnell\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EShared Autonomy Via Hindsight Optimization\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003ERobotics Science and Systems\u003C\u002Fspan\u003E, (\u003Cspan class=\"publisher-name\"\u003ENIH Public Access\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2015\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Robotics Science and Systems' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robotics+Science+and+Systems&author=Javdani+S.&author=Srinivasa+S.+S.&author=Bagnell+J.+A.&publication+year=2015\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robotics+Science+and+Systems&author=Javdani+S.&author=Srinivasa+S.+S.&author=Bagnell+J.+A.&publication+year=2015",openUrlParams:{genre:o,btitle:ap,title:ap,atitle:"Shared Autonomy Via Hindsight Optimization",aulast:a,aufirst:a,au:a,pub:"NIH Public Access",date:C,spage:f,epage:f,doi:a,sid:d},innerRefId:"r87",title:ap,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref88",displayNumber:"[88]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAarno\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEkvall\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKragic\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAdaptive virtual fixtures for machine-assisted teleoperation tasks\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and 
Automation\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2005\u003C\u002Fspan\u003E) pp\u003Cspan class=\"fpage\"\u003E1139\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1144\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive virtual fixtures for machine-assisted teleoperation tasks' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+virtual+fixtures+for+machine-assisted+teleoperation+tasks&author=Aarno+D.&author=Ekvall+S.&author=Kragic+D.&publication+year=2005\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+virtual+fixtures+for+machine-assisted+teleoperation+tasks&author=Aarno+D.&author=Ekvall+S.&author=Kragic+D.&publication+year=2005",openUrlParams:{genre:h,date:an,sid:d,title:dp},innerRefId:"r88",title:dp,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref89",displayNumber:"[89]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECrandall\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ. W.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGoodrich\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003ECharacterizing Efficiency of Human Robot Interaction: A Case Study of Shared-Control teleoperation\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2002\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E1290\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1295\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Characterizing Efficiency of Human Robot Interaction: A Case Study of Shared-Control teleoperation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Characterizing+Efficiency+of+Human+Robot+Interaction%3A+A+Case+Study+of+Shared-Control+teleoperation&author=Crandall+J.+W.&author=Goodrich+M.+A.&publication+year=2002\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Characterizing+Efficiency+of+Human+Robot+Interaction%3A+A+Case+Study+of+Shared-Control+teleoperation&author=Crandall+J.+W.&author=Goodrich+M.+A.&publication+year=2002",openUrlParams:{genre:h,date:Y,sid:d,title:dq},innerRefId:"r89",title:dq,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref90",displayNumber:"[90]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAbi-Farraj\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPacchierotti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGiordano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EHaptic-based shared-control methods for a dual-arm system\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E4249\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4256\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Haptic-based shared-control methods for a dual-arm system' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2864353\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Haptic-based shared-control methods for a dual-arm system' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Haptic-based+shared-control+methods+for+a+dual-arm+system&author=Selvaggio+M.&author=Abi-Farraj+F.&author=Pacchierotti+C.&author=Giordano+P.+R.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2864353&pages=4249-4256\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Haptic-based+shared-control+methods+for+a+dual-arm+system&author=Selvaggio+M.&author=Abi-Farraj+F.&author=Pacchierotti+C.&author=Giordano+P.+R.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2864353&pages=4249-4256",openUrlParams:{genre:e,atitle:dr,jtitle:i,title:i,volume:z,artnum:"65f99c3d442f7c0001a3481b",spage:"4249",epage:"4256",date:n,sid:d,aulast:a,aufirst:a,doi:ds,au:a},innerRefId:"r90",title:dr,doi:ds,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2864353",pubMedLink:a}]},{id:"ref91",displayNumber:"[91]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGiordano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EPassive task-prioritized shared-control teleoperation with haptic guidance\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EInternational Conference on Robotics and Automation (ICRA)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003Ec) pp. 
\u003Cspan class=\"fpage\"\u003E430\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E436\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passive task-prioritized shared-control teleoperation with haptic guidance' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA.2019.8794197\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passive task-prioritized shared-control teleoperation with haptic guidance' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+task-prioritized+shared-control+teleoperation+with+haptic+guidance&author=Selvaggio+M.&author=Giordano+P.+R.&author=Ficuciello+F.&author=Siciliano+B.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+task-prioritized+shared-control+teleoperation+with+haptic+guidance&author=Selvaggio+M.&author=Giordano+P.+R.&author=Ficuciello+F.&author=Siciliano+B.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:dt},innerRefId:"r91",title:dt,doi:"10.1109\u002FICRA.2019.8794197",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICRA.2019.8794197",pubMedLink:a}]},{id:"ref92",displayNumber:"[92]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPassive virtual fixtures adaptation in minimally invasive robotic surgery\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E3\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E3129\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3136\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003Eb).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Passive virtual fixtures adaptation in minimally invasive robotic surgery' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2849876\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Passive virtual fixtures adaptation in minimally invasive robotic surgery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+virtual+fixtures+adaptation+in+minimally+invasive+robotic+surgery&author=Selvaggio+M.&author=Fontanelli+G.+A.&author=Ficuciello+L.&author=Villani+F.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2849876&pages=3129-3136\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Passive+virtual+fixtures+adaptation+in+minimally+invasive+robotic+surgery&author=Selvaggio+M.&author=Fontanelli+G.+A.&author=Ficuciello+L.&author=Villani+F.&author=Siciliano+B.&publication+year=2018&journal=IEEE+Robot+Auto+Lett&volume=3&doi=10.1109%2FLRA.2018.2849876&pages=3129-3136",openUrlParams:{genre:e,atitle:du,jtitle:i,title:i,volume:z,artnum:"65f99c3d442f7c0001a3481d",spage:"3129",epage:"3136",date:n,sid:d,aulast:a,aufirst:a,doi:dv,au:a},innerRefId:"r92",title:du,doi:dv,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2018.2849876",pubMedLink:a}]},{id:"ref93",displayNumber:"[93]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENotomista\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EChen\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETrapani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaldwell\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. 
\u003Cspan class=\"article-title\"\u003EEnhancing bilateral teleoperation using camera-based online virtual fixtures generation\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E1483\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1488\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Enhancing bilateral teleoperation using camera-based online virtual fixtures generation' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Enhancing+bilateral+teleoperation+using+camera-based+online+virtual+fixtures+generation&author=Selvaggio+M.&author=Notomista+G.&author=Chen+F.&author=Gao+B.&author=Trapani+F.&author=Caldwell+D.&publication+year=2016\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Enhancing+bilateral+teleoperation+using+camera-based+online+virtual+fixtures+generation&author=Selvaggio+M.&author=Notomista+G.&author=Chen+F.&author=Gao+B.&author=Trapani+F.&author=Caldwell+D.&publication+year=2016",openUrlParams:{genre:h,date:v,sid:d,title:dw},innerRefId:"r93",title:dw,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref94",displayNumber:"[94]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EA.Ghalamzan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoccia\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EHaptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003Eb) pp. \u003Cspan class=\"fpage\"\u003E3617\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3623\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Haptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS40897.2019.8968109\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Haptic-guided shared control for needle grasping optimization in minimally invasive robotic surgery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Haptic-guided+shared+control+for+needle+grasping+optimization+in+minimally+invasive+robotic+surgery&author=Selvaggio+M.&author=A.Ghalamzan+E.&author=Moccia+R.&author=Ficuciello+F.&author=Siciliano+B.&publication+year=2019\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Haptic-guided+shared+control+for+needle+grasping+optimization+in+minimally+invasive+robotic+surgery&author=Selvaggio+M.&author=A.Ghalamzan+E.&author=Moccia+R.&author=Ficuciello+F.&author=Siciliano+B.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:dx},innerRefId:"r94",title:dx,doi:"10.1109\u002FIROS40897.2019.8968109",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS40897.2019.8968109",pubMedLink:a}]},{id:"ref95",displayNumber:"[95]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERodriguez-Guerra\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESorrosal\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECabanes\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EI.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECalleja\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EHuman-robot interaction review: Challenges and solutions for modern industrial environments\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Access\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E9\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E108557\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E108578\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Human-robot interaction review: Challenges and solutions for modern industrial 
environments' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FACCESS.2021.3099287\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Human-robot interaction review: Challenges and solutions for modern industrial environments' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-robot+interaction+review%3A+Challenges+and+solutions+for+modern+industrial+environments&author=Rodriguez-Guerra+D.&author=Sorrosal+G.&author=Cabanes+I.&author=Calleja+C.&publication+year=2021&journal=IEEE+Access&volume=9&doi=10.1109%2FACCESS.2021.3099287&pages=108557-108578\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-robot+interaction+review%3A+Challenges+and+solutions+for+modern+industrial+environments&author=Rodriguez-Guerra+D.&author=Sorrosal+G.&author=Cabanes+I.&author=Calleja+C.&publication+year=2021&journal=IEEE+Access&volume=9&doi=10.1109%2FACCESS.2021.3099287&pages=108557-108578",openUrlParams:{genre:e,atitle:dy,jtitle:dz,title:dz,volume:O,artnum:"65f99c3d442f7c0001a34820",spage:"108557",epage:"108578",date:t,sid:d,aulast:a,aufirst:a,doi:dA,au:a},innerRefId:"r95",title:dy,doi:dA,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FACCESS.2021.3099287",pubMedLink:a}]},{id:"ref96",displayNumber:"[96]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESchultheis\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECooper\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. 
P.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"article-title\"\u003EEveryday activities\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Everyday activities' href=https:\u002F\u002Fdx.doi.org\u002F10.1111\u002Ftops.12603\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Everyday activities' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Everyday+activities&author=Schultheis+H.&author=Cooper+R.+P.&publication+year=2022\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Everyday+activities&author=Schultheis+H.&author=Cooper+R.+P.&publication+year=2022",openUrlParams:{genre:h,date:j,sid:d,title:dB},innerRefId:"r96",title:dB,doi:"10.1111\u002Ftops.12603",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1111\u002Ftops.12603",pubMedLink:a}]},{id:"ref97",displayNumber:"[97]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBeetz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBeßler\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHaidu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPomarlan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBozcuoğlu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. 
K.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBartels\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EKnow rob 2.0–a 2nd Generation Knowledge Processing Framework for Cognition-Enabled Robotic Agents\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation (ICRA) 2018\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E512\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E519\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Know rob 2.0–a 2nd Generation Knowledge Processing Framework for Cognition-Enabled Robotic Agents' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Know+rob+2.0%E2%80%93a+2nd+Generation+Knowledge+Processing+Framework+for+Cognition-Enabled+Robotic+Agents&author=Beetz+M.&author=Be%C3%9Fler+D.&author=Haidu+A.&author=Pomarlan+M.&author=Bozcuo%C4%9Flu+A.+K.&author=Bartels+G.&publication+year=2018\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Know+rob+2.0%E2%80%93a+2nd+Generation+Knowledge+Processing+Framework+for+Cognition-Enabled+Robotic+Agents&author=Beetz+M.&author=Be%C3%9Fler+D.&author=Haidu+A.&author=Pomarlan+M.&author=Bozcuo%C4%9Flu+A.+K.&author=Bartels+G.&publication+year=2018",openUrlParams:{genre:h,date:n,sid:d,title:dC},innerRefId:"r97",title:dC,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref98",displayNumber:"[98]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELemaignan\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWarnier\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESisbot\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EClodic\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAlami\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EArtificial cognition for social human–robot interaction: An implementation\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EArtif Intell\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E247\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E45\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E69\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Artificial cognition for social human–robot interaction: An implementation' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.artint.2016.07.002\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Artificial cognition for social human–robot interaction: An implementation' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Artificial+cognition+for+social+human%E2%80%93robot+interaction%3A+An+implementation&author=Lemaignan+S.&author=Warnier+M.&author=Sisbot+E.+A.&author=Clodic+A.&author=Alami+R.&publication+year=2017&journal=Artif+Intell&volume=247&doi=10.1016%2Fj.artint.2016.07.002&pages=45-69\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Artificial+cognition+for+social+human%E2%80%93robot+interaction%3A+An+implementation&author=Lemaignan+S.&author=Warnier+M.&author=Sisbot+E.+A.&author=Clodic+A.&author=Alami+R.&publication+year=2017&journal=Artif+Intell&volume=247&doi=10.1016%2Fj.artint.2016.07.002&pages=45-69",openUrlParams:{genre:e,atitle:dD,jtitle:dE,title:dE,volume:"247",artnum:"65f99c3d442f7c0001a34823",spage:"45",epage:aX,date:A,sid:d,aulast:a,aufirst:a,doi:dF,au:a},innerRefId:"r98",title:dD,doi:dF,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.artint.2016.07.002",pubMedLink:a}]},{id:"ref99",displayNumber:"[99]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBeßler\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPorzel\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPomarlan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBeetz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMalaka\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBateman\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EA Formal Model of Affordances for Flexible Robotic Task Execution\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EECAI\u003C\u002Fspan\u003E, (\u003Cspan class=\"publisher-name\"\u003EIOS Press\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E2425\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2432\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for ECAI' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=ECAI&author=Be%C3%9Fler+D.&author=Porzel+R.&author=Pomarlan+M.&author=Beetz+M.&author=Malaka+R.&author=Bateman+J.&publication+year=2020&pages=2425-2432\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=ECAI&author=Be%C3%9Fler+D.&author=Porzel+R.&author=Pomarlan+M.&author=Beetz+M.&author=Malaka+R.&author=Bateman+J.&publication+year=2020&pages=2425-2432",openUrlParams:{genre:o,btitle:aq,title:aq,atitle:"A Formal Model of Affordances for Flexible Robotic Task Execution",aulast:a,aufirst:a,au:a,pub:"IOS Press",date:l,spage:"2425",epage:"2432",doi:a,sid:d},innerRefId:"r99",title:aq,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref100",displayNumber:"[100]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003Ede la Cruz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPiater\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESaveriano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EReconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Systems, Man, and Cybernetics (SMC) 2020\u003C\u002Fem\u003E, (\u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E1915\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1922\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Reconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FSMC42975.2020.9282817\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Reconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Reconfigurable+Behavior+Trees%3A+Towards+an+Executive+framework+meeting+high-level+decision+making+and+control+layer+features&author=de+la+Cruz+P.&author=Piater+J.&author=Saveriano+M.&publication+year=2020\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Reconfigurable+Behavior+Trees%3A+Towards+an+Executive+framework+meeting+high-level+decision+making+and+control+layer+features&author=de+la+Cruz+P.&author=Piater+J.&author=Saveriano+M.&publication+year=2020",openUrlParams:{genre:h,date:l,sid:d,title:dG},innerRefId:"r100",title:dG,doi:"10.1109\u002FSMC42975.2020.9282817",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FSMC42975.2020.9282817",pubMedLink:a}]},{id:"ref101",displayNumber:"[101]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECarbone\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOrlandini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPirri\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EModel-based control architecture for attentive robots in rescue scenarios\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAuton Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E24\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E87\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E120\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2008\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Model-based control architecture for attentive robots in rescue scenarios' 
href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-007-9055-6\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Model-based control architecture for attentive robots in rescue scenarios' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Model-based+control+architecture+for+attentive+robots+in+rescue+scenarios&author=Carbone+A.&author=Finzi+A.&author=Orlandini+A.&author=Pirri+F.&publication+year=2008&journal=Auton+Robot&volume=24&doi=10.1007%2Fs10514-007-9055-6&pages=87-120\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Model-based+control+architecture+for+attentive+robots+in+rescue+scenarios&author=Carbone+A.&author=Finzi+A.&author=Orlandini+A.&author=Pirri+F.&publication+year=2008&journal=Auton+Robot&volume=24&doi=10.1007%2Fs10514-007-9055-6&pages=87-120",openUrlParams:{genre:e,atitle:dH,jtitle:x,title:x,volume:dI,artnum:"65f99c3d442f7c0001a34826",spage:dJ,epage:"120",date:G,sid:d,aulast:a,aufirst:a,doi:dK,au:a},innerRefId:"r101",title:dH,doi:dK,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-007-9055-6",pubMedLink:a}]},{id:"ref102",displayNumber:"[102]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKarpas\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELevine\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. J.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWilliams\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB. 
C.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003ERobust Execution of Plans for Human-Robot Teams\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EICAPS-2015\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2015\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E342\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E346\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for ICAPS-2015' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=ICAPS-2015&author=Karpas+E.&author=Levine+S.+J.&author=Yu+P.&author=Williams+B.+C.&publication+year=2015&pages=342-346\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=ICAPS-2015&author=Karpas+E.&author=Levine+S.+J.&author=Yu+P.&author=Williams+B.+C.&publication+year=2015&pages=342-346",openUrlParams:{genre:o,btitle:ar,title:ar,atitle:"Robust Execution of Plans for Human-Robot Teams",aulast:a,aufirst:a,au:a,pub:f,date:C,spage:"342",epage:"346",doi:a,sid:d},innerRefId:"r102",title:ar,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref103",displayNumber:"[103]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBotvinick\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBraver\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBarch\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED. M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECarter\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC. 
S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECohen\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ. D.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EConflict monitoring and cognitive control\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EPsychol Rev\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E108\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E624\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E652\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2001\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Conflict monitoring and cognitive control' href=https:\u002F\u002Fdx.doi.org\u002F10.1037\u002F0033-295X.108.3.624\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Conflict monitoring and cognitive control' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Conflict+monitoring+and+cognitive+control&author=Botvinick+M.+M.&author=Braver+T.+S.&author=Barch+D.+M.&author=Carter+C.+S.&author=Cohen+J.+D.&publication+year=2001&journal=Psychol+Rev&volume=108&doi=10.1037%2F0033-295X.108.3.624&pages=624-652\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for Conflict monitoring and cognitive control' 
href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F11488380\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Conflict+monitoring+and+cognitive+control&author=Botvinick+M.+M.&author=Braver+T.+S.&author=Barch+D.+M.&author=Carter+C.+S.&author=Cohen+J.+D.&publication+year=2001&journal=Psychol+Rev&volume=108&doi=10.1037%2F0033-295X.108.3.624&pages=624-652",openUrlParams:{genre:e,atitle:dL,jtitle:T,title:T,volume:"108",artnum:"65f99c3d442f7c0001a34828",spage:"624",epage:"652",date:dM,sid:d,aulast:a,aufirst:a,doi:dN,au:a},innerRefId:"r103",title:dL,pubMedId:"11488380",doi:dN,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1037\u002F0033-295X.108.3.624",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F11488380"}]},{id:"ref104",displayNumber:"[104]",existInContent:c,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECooper\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EShallice\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EContention scheduling and the control of routine activities\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ECogn Neuropsychol\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E17\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E297\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E338\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2000\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Contention scheduling and the control of routine activities' href=https:\u002F\u002Fdx.doi.org\u002F10.1080\u002F026432900380427\u003ECrossRef\u003C\u002Fa\u003E\u003Ca 
class='ref-link' target='_blank' aria-label='Google Scholar link for Contention scheduling and the control of routine activities' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Contention+scheduling+and+the+control+of+routine+activities&author=Cooper+R.&author=Shallice+T.&publication+year=2000&journal=Cogn+Neuropsychol&volume=17&doi=10.1080%2F026432900380427&pages=297-338\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for Contention scheduling and the control of routine activities' href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F20945185\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Contention+scheduling+and+the+control+of+routine+activities&author=Cooper+R.&author=Shallice+T.&publication+year=2000&journal=Cogn+Neuropsychol&volume=17&doi=10.1080%2F026432900380427&pages=297-338",openUrlParams:{genre:e,atitle:dO,jtitle:dP,title:dP,volume:"17",artnum:"65f99c3d442f7c0001a34829",spage:bQ,epage:"338",date:dj,sid:d,aulast:a,aufirst:a,doi:dQ,au:a},innerRefId:"r104",title:dO,pubMedId:"20945185",doi:dQ,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1080\u002F026432900380427",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F20945185"}]},{id:"ref105",displayNumber:"[105]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECooper\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EShallice\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EHierarchical schemas and goals in the control of sequential behavior\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EPsychol Rev\u003C\u002Fspan\u003E \u003Cspan 
class=\"volume\"\u003E113\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E887\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E916\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2006\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Hierarchical schemas and goals in the control of sequential behavior' href=https:\u002F\u002Fdx.doi.org\u002F10.1037\u002F0033-295X.113.4.887\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Hierarchical schemas and goals in the control of sequential behavior' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Hierarchical+schemas+and+goals+in+the+control+of+sequential+behavior&author=Cooper+R.&author=Shallice+T.&publication+year=2006&journal=Psychol+Rev&volume=113&doi=10.1037%2F0033-295X.113.4.887&pages=887-916\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for Hierarchical schemas and goals in the control of sequential behavior' href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F17014307\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Hierarchical+schemas+and+goals+in+the+control+of+sequential+behavior&author=Cooper+R.&author=Shallice+T.&publication+year=2006&journal=Psychol+Rev&volume=113&doi=10.1037%2F0033-295X.113.4.887&pages=887-916",openUrlParams:{genre:e,atitle:dR,jtitle:T,title:T,volume:"113",artnum:"65f99c3d442f7c0001a3482a",spage:"887",epage:"916",date:"2006",sid:d,aulast:a,aufirst:a,doi:dS,au:a},innerRefId:"r105",title:dR,pubMedId:"17014307",doi:dS,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1037\u002F0033-295X.113.4.887",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F17014307"}]},{id:"ref106",displayNumber:"[106]",existInContent:b,content:"\u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPetrík\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETapaswi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELaptev\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EI.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESivic\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ELearning Object Manipulation Skills via Approximate State Estimation from Real Videos\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EConference on Robot Learning\u003C\u002Fem\u003E, (\u003Cspan class=\"publisher-name\"\u003EPMLR\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E296\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E312\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Learning Object Manipulation Skills via Approximate State Estimation from Real Videos' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS47612.2022.9982084\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Learning Object Manipulation Skills via Approximate State Estimation from Real Videos' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Learning+Object+Manipulation+Skills+via+Approximate+State+Estimation+from+Real+Videos&author=Petr%C3%ADk+V.&author=Tapaswi+M.&author=Laptev+I.&author=Sivic+J.&publication+year=2021\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Learning+Object+Manipulation+Skills+via+Approximate+State+Estimation+from+Real+Videos&author=Petr%C3%ADk+V.&author=Tapaswi+M.&author=Laptev+I.&author=Sivic+J.&publication+year=2021",openUrlParams:{genre:h,date:t,sid:d,title:dT},innerRefId:"r106",title:dT,doi:"10.1109\u002FIROS47612.2022.9982084",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS47612.2022.9982084",pubMedLink:a}]},{id:"ref107",displayNumber:"[107]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERamirez-Amaro\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECheng\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA 
Survey on Semantic-Based Methods for the Understanding of Human Movements\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ERobot Auton Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E119\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E31\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E50\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A Survey on Semantic-Based Methods for the Understanding of Human Movements' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2019.05.013\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Survey on Semantic-Based Methods for the Understanding of Human Movements' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Survey+on+Semantic-Based+Methods+for+the+Understanding+of+Human+Movements&author=Ramirez-Amaro+K.&author=Yang+Y.&author=Cheng+G.&publication+year=2019&journal=Robot+Auton+Syst&volume=119&doi=10.1016%2Fj.robot.2019.05.013&pages=31-50\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Survey+on+Semantic-Based+Methods+for+the+Understanding+of+Human+Movements&author=Ramirez-Amaro+K.&author=Yang+Y.&author=Cheng+G.&publication+year=2019&journal=Robot+Auton+Syst&volume=119&doi=10.1016%2Fj.robot.2019.05.013&pages=31-50",openUrlParams:{genre:e,atitle:dU,jtitle:r,title:r,volume:"119",artnum:"65f99c3d442f7c0001a3482c",spage:br,epage:"50",date:k,sid:d,aulast:a,aufirst:a,doi:dV,au:a},innerRefId:"r107",title:dU,doi:dV,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2019.05.013",pubMedLink:a}]},{id:"ref108",displayNumber:"[108]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMansouri\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPecora\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESchüller\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ECombining task and motion planning: Challenges and guidelines\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EFront Robot AI\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E8\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E637888\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Combining task and motion planning: Challenges and guidelines' href=https:\u002F\u002Fdx.doi.org\u002F10.3389\u002Ffrobt.2021.637888\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Combining task and motion planning: Challenges and guidelines' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Combining+task+and+motion+planning%3A+Challenges+and+guidelines&author=Mansouri+M.&author=Pecora+F.&author=Sch%C3%BCller+P.&publication+year=2021&journal=Front+Robot+AI&volume=8&doi=10.3389%2Ffrobt.2021.637888\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for Combining task and motion planning: Challenges and guidelines' 
href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F34095239\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Combining+task+and+motion+planning%3A+Challenges+and+guidelines&author=Mansouri+M.&author=Pecora+F.&author=Sch%C3%BCller+P.&publication+year=2021&journal=Front+Robot+AI&volume=8&doi=10.3389%2Ffrobt.2021.637888",openUrlParams:{genre:e,atitle:dW,jtitle:dX,title:dX,volume:$,artnum:"65f99c3d442f7c0001a3482d",spage:"637888",epage:f,date:t,sid:d,aulast:a,aufirst:a,doi:dY,au:a},innerRefId:"r108",title:dW,pubMedId:"34095239",doi:dY,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.3389\u002Ffrobt.2021.637888",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F34095239"}]},{id:"ref109",displayNumber:"[109]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAttentional Multimodal Interface for Multidrone Search in the Alps\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE international conference on systems, man, and cybernetics (SMC)\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E001178\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E001183\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Attentional Multimodal Interface for Multidrone Search in the Alps' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attentional+Multimodal+Interface+for+Multidrone+Search+in+the+Alps&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Lippiello+V.&publication+year=2016\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attentional+Multimodal+Interface+for+Multidrone+Search+in+the+Alps&author=Cacace+J.&author=Caccavale+R.&author=Finzi+A.&author=Lippiello+V.&publication+year=2016",openUrlParams:{genre:h,date:v,sid:d,title:dZ},innerRefId:"r109",title:dZ,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref110",displayNumber:"[110]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECacace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFiore\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAlami\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAttentional Supervision of Human-Robot Collaborative 
Plans\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003E25th IEEE International Symposium on Robot and Human Interactive Communication (RO_MAN)\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E867\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E873\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Attentional Supervision of Human-Robot Collaborative Plans' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attentional+Supervision+of+Human-Robot+Collaborative+Plans&author=Caccavale+R.&author=Cacace+J.&author=Fiore+M.&author=Alami+R.&author=Finzi+A.&publication+year=2016\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attentional+Supervision+of+Human-Robot+Collaborative+Plans&author=Caccavale+R.&author=Cacace+J.&author=Fiore+M.&author=Alami+R.&author=Finzi+A.&publication+year=2016",openUrlParams:{genre:h,date:v,sid:d,title:d_},innerRefId:"r110",title:d_,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref111",displayNumber:"[111]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPlan Execution and Attentional Regulations for Flexible Human-Robot Interaction\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Systems, Man, and Cybernetics 2015\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E (\u003Cspan 
class=\"year\"\u003E2015\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E2453\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2458\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Plan Execution and Attentional Regulations for Flexible Human-Robot Interaction' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Plan+Execution+and+Attentional+Regulations+for+Flexible+Human-Robot+Interaction&author=Caccavale+R.&author=Finzi+A.&publication+year=2015\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Plan+Execution+and+Attentional+Regulations+for+Flexible+Human-Robot+Interaction&author=Caccavale+R.&author=Finzi+A.&publication+year=2015",openUrlParams:{genre:h,date:C,sid:d,title:d$},innerRefId:"r111",title:d$,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref112",displayNumber:"[112]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EFlexible task execution and attentional regulations in human-robot interaction\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Cogn Develp Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E9\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E68\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E79\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Flexible task execution and attentional regulations in 
human-robot interaction' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCDS.2016.2614690\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Flexible task execution and attentional regulations in human-robot interaction' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Flexible+task+execution+and+attentional+regulations+in+human-robot+interaction&author=Caccavale+R.&author=Finzi+A.&publication+year=2016&journal=IEEE+Trans+Cogn+Develp+Syst&volume=9&doi=10.1109%2FTCDS.2016.2614690&pages=68-79\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Flexible+task+execution+and+attentional+regulations+in+human-robot+interaction&author=Caccavale+R.&author=Finzi+A.&publication+year=2016&journal=IEEE+Trans+Cogn+Develp+Syst&volume=9&doi=10.1109%2FTCDS.2016.2614690&pages=68-79",openUrlParams:{genre:e,atitle:ea,jtitle:eb,title:eb,volume:O,artnum:"65f99c3d442f7c0001a34831",spage:"68",epage:ec,date:v,sid:d,aulast:a,aufirst:a,doi:ed,au:a},innerRefId:"r112",title:ea,doi:ed,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTCDS.2016.2614690",pubMedLink:a}]},{id:"ref113",displayNumber:"[113]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EToward a Cognitive Control Framework for Explainable Robotics\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EHuman-Friendly Robotics 2020: 13th International Workshop\u003C\u002Fspan\u003E, (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan 
class=\"year\"\u003E2021\u003C\u002Fspan\u003E)\u003Cspan class=\"fpage\"\u003E46\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E58\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Human-Friendly Robotics 2020: 13th International Workshop' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-3-030-71356-0_4\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Human-Friendly Robotics 2020: 13th International Workshop' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-Friendly+Robotics+2020%3A+13th+International+Workshop&author=Caccavale+R.&author=Finzi+A.&publication+year=2021&pages=46-58\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Human-Friendly+Robotics+2020%3A+13th+International+Workshop&author=Caccavale+R.&author=Finzi+A.&publication+year=2021&pages=46-58",openUrlParams:{genre:o,btitle:as,title:as,atitle:"Toward a Cognitive Control Framework for Explainable Robotics",aulast:a,aufirst:a,au:a,pub:B,date:t,spage:"46",epage:"58",doi:ee,sid:d},innerRefId:"r113",title:as,doi:ee,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-3-030-71356-0_4",pubMedLink:a}]},{id:"ref114",displayNumber:"[114]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA robotic cognitive control framework for collaborative task execution and learning\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ETop Cogn Sci\u003C\u002Fspan\u003E \u003Cspan 
class=\"volume\"\u003E14\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E2\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E327\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E343\u003C\u002Fspan\u003E(\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003Eb).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A robotic cognitive control framework for collaborative task execution and learning' href=https:\u002F\u002Fdx.doi.org\u002F10.1111\u002Ftops.12587\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A robotic cognitive control framework for collaborative task execution and learning' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+robotic+cognitive+control+framework+for+collaborative+task+execution+and+learning&author=Caccavale+R.&author=Finzi+A.&publication+year=2022&journal=Top+Cogn+Sci&volume=14&doi=10.1111%2Ftops.12587&pages=327-343\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for A robotic cognitive control framework for collaborative task execution and learning' 
href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F34826350\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+robotic+cognitive+control+framework+for+collaborative+task+execution+and+learning&author=Caccavale+R.&author=Finzi+A.&publication+year=2022&journal=Top+Cogn+Sci&volume=14&doi=10.1111%2Ftops.12587&pages=327-343",openUrlParams:{genre:e,atitle:ef,jtitle:eg,title:eg,volume:cg,artnum:"65f99c3d442f7c0001a34833",spage:"327",epage:"343",date:j,sid:d,aulast:a,aufirst:a,doi:eh,au:a},innerRefId:"r114",title:ef,pubMedId:"34826350",doi:eh,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1111\u002Ftops.12587",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F34826350"}]},{id:"ref115",displayNumber:"[115]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELeone\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELucignano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERossi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EStaffa\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan 
class=\"article-title\"\u003EAttentional Regulations in a Situated Human-Robot Dialogue\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EThe 23rd IEEE International Symposium on Robot and Human Interactive Communication\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E844\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E849\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Attentional Regulations in a Situated Human-Robot Dialogue' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attentional+Regulations+in+a+Situated+Human-Robot+Dialogue&author=Caccavale+R.&author=Leone+E.&author=Lucignano+L.&author=Rossi+S.&author=Staffa+M.&author=Finzi+A.&publication+year=2014\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attentional+Regulations+in+a+Situated+Human-Robot+Dialogue&author=Caccavale+R.&author=Leone+E.&author=Lucignano+L.&author=Rossi+S.&author=Staffa+M.&author=Finzi+A.&publication+year=2014",openUrlParams:{genre:h,date:D,sid:d,title:ei},innerRefId:"r115",title:ei,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref116",displayNumber:"[116]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ELearning attentional regulations for structured tasks execution in robotic cognitive control\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAuton Robot\u003C\u002Fspan\u003E \u003Cspan 
class=\"volume\"\u003E43\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E2229\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2243\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Learning attentional regulations for structured tasks execution in robotic cognitive control' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-019-09876-x\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Learning attentional regulations for structured tasks execution in robotic cognitive control' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Learning+attentional+regulations+for+structured+tasks+execution+in+robotic+cognitive+control&author=Caccavale+R.&author=Finzi+A.&publication+year=2019&journal=Auton+Robot&volume=43&doi=10.1007%2Fs10514-019-09876-x&pages=2229-2243\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Learning+attentional+regulations+for+structured+tasks+execution+in+robotic+cognitive+control&author=Caccavale+R.&author=Finzi+A.&publication+year=2019&journal=Auton+Robot&volume=43&doi=10.1007%2Fs10514-019-09876-x&pages=2229-2243",openUrlParams:{genre:e,atitle:ej,jtitle:x,title:x,volume:ek,artnum:"65f99c3d442f7c0001a34835",spage:"2229",epage:"2243",date:k,sid:d,aulast:a,aufirst:a,doi:el,au:a},innerRefId:"r116",title:ej,doi:el,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-019-09876-x",pubMedLink:a}]},{id:"ref117",displayNumber:"[117]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESaveriano\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELee\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EKinesthetic teaching and attentional supervision of structured tasks in human–robot interaction\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAuton Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E43\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E6\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1291\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1307\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Kinesthetic teaching and attentional supervision of structured tasks in human–robot interaction' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-018-9706-9\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Kinesthetic teaching and attentional supervision of structured tasks in human–robot interaction' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Kinesthetic+teaching+and+attentional+supervision+of+structured+tasks+in+human%E2%80%93robot+interaction&author=Caccavale+R.&author=Saveriano+M.&author=Finzi+A.&author=Lee+D.&publication+year=2019&journal=Auton+Robot&volume=43&doi=10.1007%2Fs10514-018-9706-9&pages=1291-1307\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Kinesthetic+teaching+and+attentional+supervision+of+structured+tasks+in+human%E2%80%93robot+interaction&author=Caccavale+R.&author=Saveriano+M.&author=Finzi+A.&author=Lee+D.&publication+year=2019&journal=Auton+Robot&volume=43&doi=10.1007%2Fs10514-018-9706-9&pages=1291-1307",openUrlParams:{genre:e,atitle:em,jtitle:x,title:x,volume:ek,artnum:"65f99c3d442f7c0001a34836",spage:"1291",epage:"1307",date:k,sid:d,aulast:a,aufirst:a,doi:en,au:a},innerRefId:"r117",title:em,doi:en,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-018-9706-9",pubMedLink:a}]},{id:"ref118",displayNumber:"[118]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESaveriano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELee\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EImitation Learning and Attentional Supervision of Dual-Arm Structured Tasks\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EJoint IEEE International Conference on Development and Learning and Epigenetic Robotics (ICDL-EpiRob)\u003C\u002Fem\u003E, \u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E66\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E71\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Imitation Learning and Attentional Supervision of Dual-Arm Structured Tasks' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Imitation+Learning+and+Attentional+Supervision+of+Dual-Arm+Structured+Tasks&author=Caccavale+R.&author=Saveriano+M.&author=Fontanelli+G.+A.&author=Ficuciello+F.&author=Lee+D.&author=Finzi+A.&publication+year=2017\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Imitation+Learning+and+Attentional+Supervision+of+Dual-Arm+Structured+Tasks&author=Caccavale+R.&author=Saveriano+M.&author=Fontanelli+G.+A.&author=Ficuciello+F.&author=Lee+D.&author=Finzi+A.&publication+year=2017",openUrlParams:{genre:h,date:A,sid:d,title:eo},innerRefId:"r118",title:eo,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref119",displayNumber:"[119]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EErmini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFedeli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETavano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA multi-robot deep Q-learning framework for priority-based sanitization of railway stations\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAppl Intell\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E53\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E20595\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E20613\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003Ea)\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A multi-robot deep Q-learning framework for priority-based sanitization of railway stations' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10489-023-04529-0\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A multi-robot deep Q-learning framework for priority-based sanitization of railway stations' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+multi-robot+deep+Q-learning+framework+for+priority-based+sanitization+of+railway+stations&author=Caccavale+R.&author=Ermini+M.&author=Fedeli+E.&author=Finzi+A.&author=Lippiello+V.&author=Tavano+F.&publication+year=2023&journal=Appl+Intell&volume=53&doi=10.1007%2Fs10489-023-04529-0&pages=20595-20613\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+multi-robot+deep+Q-learning+framework+for+priority-based+sanitization+of+railway+stations&author=Caccavale+R.&author=Ermini+M.&author=Fedeli+E.&author=Finzi+A.&author=Lippiello+V.&author=Tavano+F.&publication+year=2023&journal=Appl+Intell&volume=53&doi=10.1007%2Fs10489-023-04529-0&pages=20595-20613",openUrlParams:{genre:e,atitle:ep,jtitle:eq,title:eq,volume:aV,artnum:"65f99c3d442f7c0001a34838",spage:"20595",epage:"20613",date:q,sid:d,aulast:a,aufirst:a,doi:er,au:a},innerRefId:"r119",title:ep,doi:er,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10489-023-04529-0",pubMedLink:a}]},{id:"ref120",displayNumber:"[120]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EErmini\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFedeli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETavano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"chapter-title\"\u003EToward a Heterogeneous Multi-Robot 
Framework for Priority-Based Sanitization of Railway Stations\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EAIxIA 2022–Advances in Artificial Intelligence: XXIst International Conference of the Italian Association for Artificial Intelligence, AIxIA 2022\u003C\u002Fspan\u003E. vol. \u003Cspan class=\"volume\"\u003E2023b\u003C\u002Fspan\u003E (\u003Cspan class=\"publisher-name\"\u003ESpringer\u003C\u002Fspan\u003E, \u003Cspan class=\"publisher-loc\"\u003EUdine, Italy\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E387\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E401\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for AIxIA 2022–Advances in Artificial Intelligence: XXIst International Conference of the Italian Association for Artificial Intelligence, AIxIA 2022' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=AIxIA+2022%E2%80%93Advances+in+Artificial+Intelligence%3A+XXIst+International+Conference+of+the+Italian+Association+for+Artificial+Intelligence%2C+AIxIA+2022&author=Caccavale+R.&author=Ermini+M.&author=Fedeli+E.&author=Finzi+A.&author=Lippiello+V.&author=Tavano+F.&publication+year=2022&pages=387-401\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=AIxIA+2022%E2%80%93Advances+in+Artificial+Intelligence%3A+XXIst+International+Conference+of+the+Italian+Association+for+Artificial+Intelligence%2C+AIxIA+2022&author=Caccavale+R.&author=Ermini+M.&author=Fedeli+E.&author=Finzi+A.&author=Lippiello+V.&author=Tavano+F.&publication+year=2022&pages=387-401",openUrlParams:{genre:o,btitle:at,title:at,atitle:"Toward a Heterogeneous Multi-Robot Framework for Priority-Based Sanitization of Railway 
Stations",aulast:a,aufirst:a,au:a,pub:B,date:j,spage:"387",epage:"401",doi:a,sid:d},innerRefId:"r120",title:at,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref121",displayNumber:"[121]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFinzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA rapidly-exploring random trees approach to combined task and motion planning\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ERobot Auton Syst\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E157\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E104238\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A rapidly-exploring random trees approach to combined task and motion planning' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2022.104238\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A rapidly-exploring random trees approach to combined task and motion planning' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+rapidly-exploring+random+trees+approach+to+combined+task+and+motion+planning&author=Caccavale+R.&author=Finzi+A.&publication+year=2022&journal=Robot+Auton+Syst&volume=157&doi=10.1016%2Fj.robot.2022.104238\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+rapidly-exploring+random+trees+approach+to+combined+task+and+motion+planning&author=Caccavale+R.&author=Finzi+A.&publication+year=2022&journal=Robot+Auton+Syst&volume=157&doi=10.1016%2Fj.robot.2022.104238",openUrlParams:{genre:e,atitle:es,jtitle:r,title:r,volume:au,artnum:"65f99c3d442f7c0001a3483a",spage:"104238",epage:f,date:j,sid:d,aulast:a,aufirst:a,doi:et,au:a},innerRefId:"r121",title:es,doi:et,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.robot.2022.104238",pubMedLink:a}]},{id:"ref122",displayNumber:"[122]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENorman\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EShallice\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"article-title\"\u003EAttention to action: Willed and automatic control of behavior\u003C\u002Fspan\u003E, \u003Cspan class=\"volume\"\u003E4\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E1986\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Attention to action: Willed and automatic control of behavior' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-1-4757-0629-1_1\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Attention to action: Willed and automatic control of behavior' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attention+to+action%3A+Willed+and+automatic+control+of+behavior&author=Norman+D.+A.&author=Shallice+T.&publication+year=1986\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Attention+to+action%3A+Willed+and+automatic+control+of+behavior&author=Norman+D.+A.&author=Shallice+T.&publication+year=1986",openUrlParams:{genre:h,date:"1986",sid:d,title:eu},innerRefId:"r122",title:eu,doi:"10.1007\u002F978-1-4757-0629-1_1",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002F978-1-4757-0629-1_1",pubMedLink:a}]},{id:"ref123",displayNumber:"[123]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEchelmeyer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKirchheim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWellbrock\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ERobotics-Logistics: Challenges for Automation of Logistic Processes\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Automation and Logistics 2008\u003C\u002Fem\u003E, (\u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2008\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E2099\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2103\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Robotics-Logistics: Challenges for Automation of Logistic Processes' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICAL.2008.4636510\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Robotics-Logistics: Challenges for Automation of Logistic Processes' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robotics-Logistics%3A+Challenges+for+Automation+of+Logistic+Processes&author=Echelmeyer+W.&author=Kirchheim+A.&author=Wellbrock+E.&publication+year=2008\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Robotics-Logistics%3A+Challenges+for+Automation+of+Logistic+Processes&author=Echelmeyer+W.&author=Kirchheim+A.&author=Wellbrock+E.&publication+year=2008",openUrlParams:{genre:h,date:G,sid:d,title:ev},innerRefId:"r123",title:ev,doi:"10.1109\u002FICAL.2008.4636510",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICAL.2008.4636510",pubMedLink:a}]},{id:"ref124",displayNumber:"[124]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESakamoto\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHarada\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EReal-time planning robotic palletizing tasks using reusable roadmaps\u003C\u002Fspan\u003E,” \u003Cspan 
class=\"source\"\u003EJ Robot, Network Art Life\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E6\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E240\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E245\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Real-time planning robotic palletizing tasks using reusable roadmaps' href=https:\u002F\u002Fdx.doi.org\u002F10.2991\u002Fjrnal.k.200222.009\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Real-time planning robotic palletizing tasks using reusable roadmaps' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Real-time+planning+robotic+palletizing+tasks+using+reusable+roadmaps&author=Sakamoto+T.&author=Harada+K.&author=Wan+W.&publication+year=2020&journal=J+Robot%2C+Network+Art+Life&volume=6&doi=10.2991%2Fjrnal.k.200222.009&pages=240-245\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Real-time+planning+robotic+palletizing+tasks+using+reusable+roadmaps&author=Sakamoto+T.&author=Harada+K.&author=Wan+W.&publication+year=2020&journal=J+Robot%2C+Network+Art+Life&volume=6&doi=10.2991%2Fjrnal.k.200222.009&pages=240-245",openUrlParams:{genre:e,atitle:ew,jtitle:ex,title:ex,volume:I,artnum:"65f99c3d442f7c0001a3483d",spage:"240",epage:"245",date:l,sid:d,aulast:a,aufirst:a,doi:ey,au:a},innerRefId:"r124",title:ew,doi:ey,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.2991\u002Fjrnal.k.200222.009",pubMedLink:a}]},{id:"ref125",displayNumber:"[125]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJocas\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKurrek\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZoghlami\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGianni\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESalehi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EAi-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EProceedings of the Design Society: International Conference on Engineering Design\u003C\u002Fem\u003E, (\u003Cspan class=\"publisher-name\"\u003ECambridge University Press\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E2041\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2050\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot' href=https:\u002F\u002Fdx.doi.org\u002F10.1017\u002Fdsi.2019.210\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Ai-Based+Learning+Approach+with+Consideration+of+Safety+Criteria+on+Example+of+a+Depalletization+Robot&author=Jocas+M.&author=Kurrek+P.&author=Zoghlami+F.&author=Gianni+M.&author=Salehi+V.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Ai-Based+Learning+Approach+with+Consideration+of+Safety+Criteria+on+Example+of+a+Depalletization+Robot&author=Jocas+M.&author=Kurrek+P.&author=Zoghlami+F.&author=Gianni+M.&author=Salehi+V.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:ez},innerRefId:"r125",title:ez,doi:"10.1017\u002Fdsi.2019.210",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1017\u002Fdsi.2019.210",pubMedLink:a}]},{id:"ref126",displayNumber:"[126]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENakamoto\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEto\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESonoura\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETanaka\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOgawa\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EHigh-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E344\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E349\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS.2016.7759077\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=High-Speed+and+Compact+Depalletizing+Robot+Capable+of+Handling+Packages+Stacked+Complicatedly&author=Nakamoto+H.&author=Eto+H.&author=Sonoura+T.&author=Tanaka+J.&author=Ogawa+A.&publication+year=2016\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=High-Speed+and+Compact+Depalletizing+Robot+Capable+of+Handling+Packages+Stacked+Complicatedly&author=Nakamoto+H.&author=Eto+H.&author=Sonoura+T.&author=Tanaka+J.&author=Ogawa+A.&publication+year=2016",openUrlParams:{genre:h,date:v,sid:d,title:eA},innerRefId:"r126",title:eA,doi:"10.1109\u002FIROS.2016.7759077",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FIROS.2016.7759077",pubMedLink:a}]},{id:"ref127",displayNumber:"[127]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESchwarz\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMilan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPeriyasamy\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. 
S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBehnke\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ERgb-d object detection and semantic segmentation for autonomous manipulation in clutter\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Robot Res\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E37\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4-5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E437\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E451\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter' href=https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364917713117\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Rgb-d+object+detection+and+semantic+segmentation+for+autonomous+manipulation+in+clutter&author=Schwarz+M.&author=Milan+A.&author=Periyasamy+A.+S.&author=Behnke+S.&publication+year=2018&journal=Int+J+Robot+Res&volume=37&doi=10.1177%2F0278364917713117&pages=437-451\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Rgb-d+object+detection+and+semantic+segmentation+for+autonomous+manipulation+in+clutter&author=Schwarz+M.&author=Milan+A.&author=Periyasamy+A.+S.&author=Behnke+S.&publication+year=2018&journal=Int+J+Robot+Res&volume=37&doi=10.1177%2F0278364917713117&pages=437-451",openUrlParams:{genre:e,atitle:eB,jtitle:w,title:w,volume:dh,artnum:"65f99c3d442f7c0001a34840",spage:"437",epage:"451",date:n,sid:d,aulast:a,aufirst:a,doi:eC,au:a},innerRefId:"r127",title:eB,doi:eC,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364917713117",pubMedLink:a}]},{id:"ref128",displayNumber:"[128]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKatsoulas\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKosmopoulos\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAn Efficient Depalletizing System Based on 2d Range Imagery\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EProceedings 2001 ICRA. IEEE International Conference on Robotics and Automation (Cat. No.01CH37164)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2001\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E305\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E312\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for An Efficient Depalletizing System Based on 2d Range Imagery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=An+Efficient+Depalletizing+System+Based+on+2d+Range+Imagery&author=Katsoulas+D.&author=Kosmopoulos+D.&publication+year=2001\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=An+Efficient+Depalletizing+System+Based+on+2d+Range+Imagery&author=Katsoulas+D.&author=Kosmopoulos+D.&publication+year=2001",openUrlParams:{genre:h,date:dM,sid:d,title:eD},innerRefId:"r128",title:eD,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref129",displayNumber:"[129]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKrug\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EStoyanov\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETincani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAndreasson\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMosberger\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFantoni\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELilienthal\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. J.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EThe next step in robot commissioning: Autonomous picking and palletizing\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E1\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E546\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E553\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for The next step in robot commissioning: Autonomous picking and palletizing' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2016.2519944\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for The next step in robot commissioning: Autonomous picking and palletizing' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+next+step+in+robot+commissioning%3A+Autonomous+picking+and+palletizing&author=Krug+R.&author=Stoyanov+T.&author=Tincani+V.&author=Andreasson+H.&author=Mosberger+R.&author=Fantoni+G.&author=Lilienthal+A.+J.&publication+year=2016&journal=IEEE+Robot+Auto+Lett&volume=1&doi=10.1109%2FLRA.2016.2519944&pages=546-553\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+next+step+in+robot+commissioning%3A+Autonomous+picking+and+palletizing&author=Krug+R.&author=Stoyanov+T.&author=Tincani+V.&author=Andreasson+H.&author=Mosberger+R.&author=Fantoni+G.&author=Lilienthal+A.+J.&publication+year=2016&journal=IEEE+Robot+Auto+Lett&volume=1&doi=10.1109%2FLRA.2016.2519944&pages=546-553",openUrlParams:{genre:e,atitle:eE,jtitle:i,title:i,volume:E,artnum:"65f99c3d442f7c0001a34842",spage:"546",epage:"553",date:v,sid:d,aulast:a,aufirst:a,doi:eF,au:a},innerRefId:"r129",title:eE,doi:eF,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2016.2519944",pubMedLink:a}]},{id:"ref130",displayNumber:"[130]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETanaka\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EOgawa\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENakamoto\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESonoura\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEto\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ESuction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EROBOMECH Journal\u003C\u002Fspan\u003E \u003Cspan 
class=\"volume\"\u003E7\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots' href=https:\u002F\u002Fdx.doi.org\u002F10.1186\u002Fs40648-019-0151-0\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Suction+pad+unit+using+a+bellows+pneumatic+actuator+as+a+support+mechanism+for+an+end+effector+of+depalletizing+robots&author=Tanaka+J.&author=Ogawa+A.&author=Nakamoto+H.&author=Sonoura+T.&author=Eto+H.&publication+year=2020&journal=ROBOMECH+Journal&volume=7&doi=10.1186%2Fs40648-019-0151-0\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Suction+pad+unit+using+a+bellows+pneumatic+actuator+as+a+support+mechanism+for+an+end+effector+of+depalletizing+robots&author=Tanaka+J.&author=Ogawa+A.&author=Nakamoto+H.&author=Sonoura+T.&author=Eto+H.&publication+year=2020&journal=ROBOMECH+Journal&volume=7&doi=10.1186%2Fs40648-019-0151-0",openUrlParams:{genre:e,atitle:eG,jtitle:eH,title:eH,volume:eI,artnum:"65f99c3d442f7c0001a34843",spage:aj,epage:f,date:l,sid:d,aulast:a,aufirst:a,doi:eJ,au:a},innerRefId:"r130",title:eG,doi:eJ,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1186\u002Fs40648-019-0151-0",pubMedLink:a}]},{id:"ref131",displayNumber:"[131]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoura\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF. 
M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESilva\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. F.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EApplication for Automatic Programming of Palletizing Robots\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Autonomous Robot Systems and Competitions (ICARSC) 2018\u003C\u002Fem\u003E, (\u003Cspan class=\"publisher-name\"\u003EIEEE\u003C\u002Fspan\u003E, \u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E48\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E53\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Application for Automatic Programming of Palletizing Robots' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICARSC.2018.8374159\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Application for Automatic Programming of Palletizing Robots' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Application+for+Automatic+Programming+of+Palletizing+Robots&author=Moura+F.+M.&author=Silva+M.+F.&publication+year=2018\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Application+for+Automatic+Programming+of+Palletizing+Robots&author=Moura+F.+M.&author=Silva+M.+F.&publication+year=2018",openUrlParams:{genre:h,date:n,sid:d,title:eK},innerRefId:"r131",title:eK,doi:"10.1109\u002FICARSC.2018.8374159",crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FICARSC.2018.8374159",pubMedLink:a}]},{id:"ref132",displayNumber:"[132]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPaduano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanellli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA flexible robotic depalletizing system for supermarket logistics\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E5\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E4471\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4476\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A flexible robotic depalletizing system for supermarket logistics' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.3000427\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' 
aria-label='Google Scholar link for A flexible robotic depalletizing system for supermarket logistics' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+flexible+robotic+depalletizing+system+for+supermarket+logistics&author=Caccavale+R.&author=Arpenti+P.&author=Paduano+G.&author=Fontanellli+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robot+Auto+Lett&volume=5&doi=10.1109%2FLRA.2020.3000427&pages=4471-4476\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+flexible+robotic+depalletizing+system+for+supermarket+logistics&author=Caccavale+R.&author=Arpenti+P.&author=Paduano+G.&author=Fontanellli+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robot+Auto+Lett&volume=5&doi=10.1109%2FLRA.2020.3000427&pages=4471-4476",openUrlParams:{genre:e,atitle:eL,jtitle:i,title:i,volume:J,artnum:"65f99c3d442f7c0001a34845",spage:"4471",epage:"4476",date:l,sid:d,aulast:a,aufirst:a,doi:eM,au:a},innerRefId:"r132",title:eL,doi:eM,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.3000427",pubMedLink:a}]},{id:"ref133",displayNumber:"[133]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPaduano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ERgb-d recognition and localization of cases for robotic depalletizing in supermarkets\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robotics and Automation Letters\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E5\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E6233\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E6238\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.3013936\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Rgb-d+recognition+and+localization+of+cases+for+robotic+depalletizing+in+supermarkets&author=Arpenti+P.&author=Caccavale+R.&author=Paduano+G.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robotics+and+Automation+Letters&volume=5&doi=10.1109%2FLRA.2020.3013936&pages=6233-6238\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Rgb-d+recognition+and+localization+of+cases+for+robotic+depalletizing+in+supermarkets&author=Arpenti+P.&author=Caccavale+R.&author=Paduano+G.&author=Fontanelli+G.+A.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robotics+and+Automation+Letters&volume=5&doi=10.1109%2FLRA.2020.3013936&pages=6233-6238",openUrlParams:{genre:e,atitle:eN,jtitle:eO,title:eO,volume:J,artnum:"65f99c3d442f7c0001a34846",spage:"6233",epage:"6238",date:l,sid:d,aulast:a,aufirst:a,doi:eP,au:a},innerRefId:"r133",title:eN,doi:eP,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.3013936",pubMedLink:a}]},{id:"ref134",displayNumber:"[134]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPaduano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaccavale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EArpenti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELippiello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E5\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E4612\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4617\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.3003283\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' 
target='_blank' aria-label='Google Scholar link for A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+reconfigurable+gripper+for+robotic+autonomous+depalletizing+in+supermarket+logistics&author=Fontanelli+G.+A.&author=Paduano+G.&author=Caccavale+R.&author=Arpenti+P.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robot+Autom+Lett&volume=5&doi=10.1109%2FLRA.2020.3003283&pages=4612-4617\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+reconfigurable+gripper+for+robotic+autonomous+depalletizing+in+supermarket+logistics&author=Fontanelli+G.+A.&author=Paduano+G.&author=Caccavale+R.&author=Arpenti+P.&author=Lippiello+V.&author=Villani+L.&author=Siciliano+B.&publication+year=2020&journal=IEEE+Robot+Autom+Lett&volume=5&doi=10.1109%2FLRA.2020.3003283&pages=4612-4617",openUrlParams:{genre:e,atitle:eQ,jtitle:s,title:s,volume:J,artnum:"65f99c3d442f7c0001a34847",spage:"4612",epage:"4617",date:l,sid:d,aulast:a,aufirst:a,doi:eR,au:a},innerRefId:"r134",title:eQ,doi:eR,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.3003283",pubMedLink:a}]},{id:"ref135",displayNumber:"[135]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBowyer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDavies\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB. L.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBaena\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EActive constraints\u002FVirtual fixtures: A survay\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E30\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E138\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E157\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Active constraints\u002FVirtual fixtures: A survay' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2013.2283410\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Active constraints\u002FVirtual fixtures: A survay' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Active+constraints%2FVirtual+fixtures%3A+A+survay&author=Bowyer+S.+A.&author=Davies+B.+L.&author=Baena+F.+R.&publication+year=2014&journal=IEEE+Trans+Robot&volume=30&doi=10.1109%2FTRO.2013.2283410&pages=138-157\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Active+constraints%2FVirtual+fixtures%3A+A+survay&author=Bowyer+S.+A.&author=Davies+B.+L.&author=Baena+F.+R.&publication+year=2014&journal=IEEE+Trans+Robot&volume=30&doi=10.1109%2FTRO.2013.2283410&pages=138-157",openUrlParams:{genre:e,atitle:eS,jtitle:m,title:m,volume:bb,artnum:"65f99c3d442f7c0001a34848",spage:"138",epage:au,date:D,sid:d,aulast:a,aufirst:a,doi:eT,au:a},innerRefId:"r135",title:eS,doi:eT,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2013.2283410",pubMedLink:a}]},{id:"ref136",displayNumber:"[136]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELi\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIshii\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETaylor\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER. H.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ESpatial motion constraints using virtual fixtures generated by anatomy\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E23\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E4\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E19\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2007\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Spatial motion constraints using virtual fixtures generated by anatomy' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2006.886838\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Spatial motion constraints using virtual fixtures generated by anatomy' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Spatial+motion+constraints+using+virtual+fixtures+generated+by+anatomy&author=Li+M.&author=Ishii+M.&author=Taylor+R.+H.&publication+year=2007&journal=IEEE+Trans+Robot&volume=23&doi=10.1109%2FTRO.2006.886838&pages=4-19\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Spatial+motion+constraints+using+virtual+fixtures+generated+by+anatomy&author=Li+M.&author=Ishii+M.&author=Taylor+R.+H.&publication+year=2007&journal=IEEE+Trans+Robot&volume=23&doi=10.1109%2FTRO.2006.886838&pages=4-19",openUrlParams:{genre:e,atitle:eU,jtitle:m,title:m,volume:"23",artnum:"65f99c3d442f7c0001a34849",spage:bi,epage:"19",date:_,sid:d,aulast:a,aufirst:a,doi:eV,au:a},innerRefId:"r136",title:eU,doi:eV,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2006.886838",pubMedLink:a}]},{id:"ref137",displayNumber:"[137]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMarinho\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. M.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAdorno\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB. 
V.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003Ek.\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMitsuishi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EDynamic active constraints for surgical robots using vector field inequalities\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E35\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1166\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1185\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Dynamic active constraints for surgical robots using vector field inequalities' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2019.2920078\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Dynamic active constraints for surgical robots using vector field inequalities' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Dynamic+active+constraints+for+surgical+robots+using+vector+field+inequalities&author=Marinho+M.+M.&author=Adorno+B.+V.&author=k.+H.&author=Mitsuishi+M.&publication+year=2019&journal=IEEE+Trans+Robot&volume=35&doi=10.1109%2FTRO.2019.2920078&pages=1166-1185\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Dynamic+active+constraints+for+surgical+robots+using+vector+field+inequalities&author=Marinho+M.+M.&author=Adorno+B.+V.&author=k.+H.&author=Mitsuishi+M.&publication+year=2019&journal=IEEE+Trans+Robot&volume=35&doi=10.1109%2FTRO.2019.2920078&pages=1166-1185",openUrlParams:{genre:e,atitle:eW,jtitle:m,title:m,volume:aF,artnum:"65f99c3d442f7c0001a3484a",spage:"1166",epage:"1185",date:k,sid:d,aulast:a,aufirst:a,doi:eX,au:a},innerRefId:"r137",title:eW,doi:eX,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTRO.2019.2920078",pubMedLink:a}]},{id:"ref138",displayNumber:"[138]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAmes\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA. D.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECoogan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EEgerstedt\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENotomista\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESreenath\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ETabuada\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EControl Barrier Functions: Theory and Applications\u003C\u002Fspan\u003E,” In: \u003Cem 
class=\"italic\"\u003EProc. 18th European Control Conference (ECC)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E3420\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E3431\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Control Barrier Functions: Theory and Applications' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Control+Barrier+Functions%3A+Theory+and+Applications&author=Ames+A.+D.&author=Coogan+S.&author=Egerstedt+M.&author=Notomista+G.&author=Sreenath+K.&author=Tabuada+P.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Control+Barrier+Functions%3A+Theory+and+Applications&author=Ames+A.+D.&author=Coogan+S.&author=Egerstedt+M.&author=Notomista+G.&author=Sreenath+K.&author=Tabuada+P.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:eY},innerRefId:"r138",title:eY,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref139",displayNumber:"[139]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EU.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY. 
B.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESeok\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.-Y.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EChoi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA surgical palpation probe with 6-axis force\u002Ftorque sensing capability for minimally invasive surgery\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Ind Electron\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E65\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E2755\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2765\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for A surgical palpation probe with 6-axis force\u002Ftorque sensing capability for minimally invasive surgery' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTIE.2017.2739681\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A surgical palpation probe with 6-axis force\u002Ftorque sensing capability for minimally invasive surgery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+surgical+palpation+probe+with+6-axis+force%2Ftorque+sensing+capability+for+minimally+invasive+surgery&author=Kim+U.&author=Kim+Y.+B.&author=Seok+D.-Y.&author=So+J.&author=Choi+H.+R.&publication+year=2018&journal=IEEE+Trans+Ind+Electron&volume=65&doi=10.1109%2FTIE.2017.2739681&pages=2755-2765\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+surgical+palpation+probe+with+6-axis+force%2Ftorque+sensing+capability+for+minimally+invasive+surgery&author=Kim+U.&author=Kim+Y.+B.&author=Seok+D.-Y.&author=So+J.&author=Choi+H.+R.&publication+year=2018&journal=IEEE+Trans+Ind+Electron&volume=65&doi=10.1109%2FTIE.2017.2739681&pages=2755-2765",openUrlParams:{genre:e,atitle:eZ,jtitle:e_,title:e_,volume:"65",artnum:"65f99c3d442f7c0001a3484c",spage:"2755",epage:"2765",date:n,sid:d,aulast:a,aufirst:a,doi:e$,au:a},innerRefId:"r139",title:eZ,doi:e$,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTIE.2017.2739681",pubMedLink:a}]},{id:"ref140",displayNumber:"[140]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELee\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.-H.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKim\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EU.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGulrez\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EYoon\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW. J.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHannaford\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EChoi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH. 
R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA laparoscopic grasping tool with force sensing capability\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE\u002FASME Trans Mech\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E21\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E130\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E141\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2016\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A laparoscopic grasping tool with force sensing capability' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+laparoscopic+grasping+tool+with+force+sensing+capability&author=Lee+D.-H.&author=Kim+U.&author=Gulrez+T.&author=Yoon+W.+J.&author=Hannaford+B.&author=Choi+H.+R.&publication+year=2016&journal=IEEE%2FASME+Trans+Mech&volume=21&pages=130-141\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+laparoscopic+grasping+tool+with+force+sensing+capability&author=Lee+D.-H.&author=Kim+U.&author=Gulrez+T.&author=Yoon+W.+J.&author=Hannaford+B.&author=Choi+H.+R.&publication+year=2016&journal=IEEE%2FASME+Trans+Mech&volume=21&pages=130-141",openUrlParams:{genre:e,atitle:fa,jtitle:U,title:U,volume:ae,artnum:"65f99c3d442f7c0001a3484d",spage:"130",epage:"141",date:v,sid:d,aulast:a,aufirst:a,doi:a,au:a},innerRefId:"r140",title:fa,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref141",displayNumber:"[141]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECatalano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGrioli\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFarnioli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESerio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPiazza\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBicchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAdaptive synergies for the design and control of the pisa\u002Fiit softhand\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Robot Res\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E33\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E5\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E768\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E782\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2014\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Adaptive synergies for the design and control of the pisa\u002Fiit softhand' href=https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364913518998\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Adaptive synergies for the design and control of the pisa\u002Fiit softhand' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+synergies+for+the+design+and+control+of+the+pisa%2Fiit+softhand&author=Catalano+M.&author=Grioli+G.&author=Farnioli+E.&author=Serio+A.&author=Piazza+C.&author=Bicchi+A.&publication+year=2014&journal=Int+J+Robot+Res&volume=33&doi=10.1177%2F0278364913518998&pages=768-782\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Adaptive+synergies+for+the+design+and+control+of+the+pisa%2Fiit+softhand&author=Catalano+M.&author=Grioli+G.&author=Farnioli+E.&author=Serio+A.&author=Piazza+C.&author=Bicchi+A.&publication+year=2014&journal=Int+J+Robot+Res&volume=33&doi=10.1177%2F0278364913518998&pages=768-782",openUrlParams:{genre:e,atitle:fb,jtitle:w,title:w,volume:dg,artnum:"65f99c3d442f7c0001a3484e",spage:"768",epage:"782",date:D,sid:d,aulast:a,aufirst:a,doi:fc,au:a},innerRefId:"r141",title:fb,doi:fc,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1177\u002F0278364913518998",pubMedLink:a}]},{id:"ref142",displayNumber:"[142]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPiazza\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECatalano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM. G.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGodfrey\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES. 
B.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ERossi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGrioli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBianchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EZhao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBicchi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EThe softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Autom Mag\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E24\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E87\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E101\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2017\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FMRA.2017.2751662\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and 
working' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+softhand+pro-h%3A+A+hybrid+body-controlled%2C+electrically+powered+hand+prosthesis+for+daily+living+and+working&author=Piazza+C.&author=Catalano+M.+G.&author=Godfrey+S.+B.&author=Rossi+M.&author=Grioli+G.&author=Bianchi+M.&author=Zhao+K.&author=Bicchi+A.&publication+year=2017&journal=IEEE+Robot+Autom+Mag&volume=24&doi=10.1109%2FMRA.2017.2751662&pages=87-101\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+softhand+pro-h%3A+A+hybrid+body-controlled%2C+electrically+powered+hand+prosthesis+for+daily+living+and+working&author=Piazza+C.&author=Catalano+M.+G.&author=Godfrey+S.+B.&author=Rossi+M.&author=Grioli+G.&author=Bianchi+M.&author=Zhao+K.&author=Bicchi+A.&publication+year=2017&journal=IEEE+Robot+Autom+Mag&volume=24&doi=10.1109%2FMRA.2017.2751662&pages=87-101",openUrlParams:{genre:e,atitle:fd,jtitle:R,title:R,volume:dI,artnum:"65f99c3d442f7c0001a3484f",spage:dJ,epage:"101",date:A,sid:d,aulast:a,aufirst:a,doi:fe,au:a},innerRefId:"r142",title:fd,doi:fe,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FMRA.2017.2751662",pubMedLink:a}]},{id:"ref143",displayNumber:"[143]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoccia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E. \u003Cspan class=\"article-title\"\u003EVision-Based Virtual Fixtures Generation for Robotic-Assisted Polyp Dissection Procedures\u003C\u002Fspan\u003E. In: \u003Cem class=\"italic\"\u003EIEEE\u002FRSJ International Conference on Intelligent Robots and Systems (IROS)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E7934\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E7939\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Vision-Based Virtual Fixtures Generation for Robotic-Assisted Polyp Dissection Procedures' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Vision-Based+Virtual+Fixtures+Generation+for+Robotic-Assisted+Polyp+Dissection+Procedures&author=Moccia+R.&author=Selvaggio+M.&author=Villani+L.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Vision-Based+Virtual+Fixtures+Generation+for+Robotic-Assisted+Polyp+Dissection+Procedures&author=Moccia+R.&author=Selvaggio+M.&author=Villani+L.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:ff},innerRefId:"r143",title:ff,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref144",displayNumber:"[144]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoccia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIacono\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EVision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E5\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E2\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1650\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1655\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.2969941\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Vision-based+dynamic+virtual+fixtures+for+tools+collision+avoidance+in+robotic+surgery&author=Moccia+R.&author=Iacono+C.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2020&journal=IEEE+Robot+Auto+Lett&volume=5&doi=10.1109%2FLRA.2020.2969941&pages=1650-1655\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Vision-based+dynamic+virtual+fixtures+for+tools+collision+avoidance+in+robotic+surgery&author=Moccia+R.&author=Iacono+C.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2020&journal=IEEE+Robot+Auto+Lett&volume=5&doi=10.1109%2FLRA.2020.2969941&pages=1650-1655",openUrlParams:{genre:e,atitle:fg,jtitle:i,title:i,volume:J,artnum:"65f99c3d442f7c0001a34851",spage:"1650",epage:"1655",date:l,sid:d,aulast:a,aufirst:a,doi:fh,au:a},innerRefId:"r144",title:fg,doi:fh,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2020.2969941",pubMedLink:a}]},{id:"ref145",displayNumber:"[145]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELiu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFerrentino\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoccia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPirozzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBracale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EU.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EThe MUSHA hand II: A multi-functional hand for robot-assisted laparoscopic surgery\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE\u002FASME Trans Mech\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E26\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E1\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E393\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E404\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2020\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for The MUSHA hand II: A multi-functional hand for robot-assisted laparoscopic surgery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+MUSHA+hand+II%3A+A+multi-functional+hand+for+robot-assisted+laparoscopic+surgery&author=Liu+H.&author=Selvaggio+M.&author=Ferrentino+P.&author=Moccia+R.&author=Pirozzi+S.&author=Bracale+U.&author=Ficuciello+F.&publication+year=2020&journal=IEEE%2FASME+Trans+Mech&volume=26&pages=393-404\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+MUSHA+hand+II%3A+A+multi-functional+hand+for+robot-assisted+laparoscopic+surgery&author=Liu+H.&author=Selvaggio+M.&author=Ferrentino+P.&author=Moccia+R.&author=Pirozzi+S.&author=Bracale+U.&author=Ficuciello+F.&publication+year=2020&journal=IEEE%2FASME+Trans+Mech&volume=26&pages=393-404",openUrlParams:{genre:e,atitle:fi,jtitle:U,title:U,volume:"26",artnum:"65f99c3d442f7c0001a34852",spage:"393",epage:"404",date:l,sid:d,aulast:a,aufirst:a,doi:a,au:a},innerRefId:"r145",title:fi,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref146",displayNumber:"[146]",existInContent:c,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESaini\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELiu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"collab\"\u003EU\u003C\u002Fspan\u003E, \u003Cspan class=\"chapter-title\"\u003EBracale Patent Granted n. 102019000001187, Application Submission Date Jan 2019\u003C\u002Fspan\u003E,” In: \u003Cspan class=\"source\"\u003EElemento Terminale Per Dispositivi Di Presa Per Interventi Chirurgici, in Particolare Interventi a Minima Invasività\u003C\u002Fspan\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Elemento Terminale Per Dispositivi Di Presa Per Interventi Chirurgici, in Particolare Interventi a Minima Invasività' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Elemento+Terminale+Per+Dispositivi+Di+Presa+Per+Interventi+Chirurgici%2C+in+Particolare+Interventi+a+Minima+Invasivit%C3%A0&author=Saini+S.&author=Ficuciello+F.&author=Liu+H.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Elemento+Terminale+Per+Dispositivi+Di+Presa+Per+Interventi+Chirurgici%2C+in+Particolare+Interventi+a+Minima+Invasivit%C3%A0&author=Saini+S.&author=Ficuciello+F.&author=Liu+H.&publication+year=2019",openUrlParams:{genre:o,btitle:av,title:av,atitle:"Bracale Patent Granted n. 
102019000001187, Application Submission Date Jan 2019",aulast:a,aufirst:a,au:a,pub:f,date:k,spage:f,epage:f,doi:a,sid:d},innerRefId:"r146",title:av,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref147",displayNumber:"[147]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMarrazzo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV. R.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBracale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EU.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIrace\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBreglio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVillani\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan 
class=\"article-title\"\u003EThe musha underactuated hand for robot-aided minimally invasive surgery\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EInt J Med Robot Comp Assis Surg\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E15\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003Ee1981\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003Ea).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for The musha underactuated hand for robot-aided minimally invasive surgery' href=https:\u002F\u002Fdx.doi.org\u002F10.1002\u002Frcs.1981\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for The musha underactuated hand for robot-aided minimally invasive surgery' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+musha+underactuated+hand+for+robot-aided+minimally+invasive+surgery&author=Selvaggio+M.&author=Fontanelli+G.+A.&author=Marrazzo+V.+R.&author=Bracale+U.&author=Irace+A.&author=Breglio+G.&author=Villani+L.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2019&journal=Int+J+Med+Robot+Comp+Assis+Surg&volume=15&doi=10.1002%2Frcs.1981\u003EGoogle Scholar\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='PubMed link for The musha underactuated hand for robot-aided minimally invasive surgery' 
href=https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F30588772\u003EPubMed\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=The+musha+underactuated+hand+for+robot-aided+minimally+invasive+surgery&author=Selvaggio+M.&author=Fontanelli+G.+A.&author=Marrazzo+V.+R.&author=Bracale+U.&author=Irace+A.&author=Breglio+G.&author=Villani+L.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2019&journal=Int+J+Med+Robot+Comp+Assis+Surg&volume=15&doi=10.1002%2Frcs.1981",openUrlParams:{genre:e,atitle:fj,jtitle:fk,title:fk,volume:"15",artnum:"65f99c3d442f7c0001a34854",spage:"e1981",epage:f,date:k,sid:d,aulast:a,aufirst:a,doi:fl,au:a},innerRefId:"r147",title:fj,pubMedId:"30588772",doi:fl,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1002\u002Frcs.1981",pubMedLink:"https:\u002F\u002Fwww.ncbi.nlm.nih.gov\u002Fpubmed\u002F30588772"}]},{id:"ref148",displayNumber:"[148]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoccia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAutonomous Endoscope Control Algorithm with Visibility and Joint Limits Avoidance Constraints for Da Vinci Research kit robot\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE International Conference on Robotics and Automation (ICRA)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E776\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E781\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Autonomous Endoscope Control Algorithm with Visibility and Joint Limits Avoidance Constraints for Da Vinci Research kit robot' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Autonomous+Endoscope+Control+Algorithm+with+Visibility+and+Joint+Limits+Avoidance+Constraints+for+Da+Vinci+Research+kit+robot&author=Moccia+R.&author=Ficuciello+F.&publication+year=2023\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Autonomous+Endoscope+Control+Algorithm+with+Visibility+and+Joint+Limits+Avoidance+Constraints+for+Da+Vinci+Research+kit+robot&author=Moccia+R.&author=Ficuciello+F.&publication+year=2023",openUrlParams:{genre:h,date:q,sid:d,title:fm},innerRefId:"r148",title:fm,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref149",displayNumber:"[149]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFerro\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBrunori\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMagistri\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESaiella\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EL.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EA Portable Da Vinci Simulator in Virtual Reality\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EThird IEEE International Conference on Robotic Computing (IRC)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E447\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E448\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for A Portable Da Vinci Simulator in Virtual Reality' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Portable+Da+Vinci+Simulator+in+Virtual+Reality&author=Ferro+M.&author=Brunori+D.&author=Magistri+F.&author=Saiella+L.&author=Selvaggio+M.&author=Fontanelli+G.+A.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=A+Portable+Da+Vinci+Simulator+in+Virtual+Reality&author=Ferro+M.&author=Brunori+D.&author=Magistri+F.&author=Saiella+L.&author=Selvaggio+M.&author=Fontanelli+G.+A.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:fn},innerRefId:"r149",title:fn,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref150",displayNumber:"[150]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFerro\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVendittelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPortable dVRK: An augmented V-REP simulator of da vinci research kit\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EActa Polytech Hung\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E16\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E8\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E79\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E98\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Portable dVRK: An augmented V-REP simulator of da vinci research kit' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Portable+dVRK%3A+An+augmented+V-REP+simulator+of+da+vinci+research+kit&author=Fontanelli+G.+A.&author=Selvaggio+M.&author=Ferro+M.&author=Ficuciello+F.&author=Vendittelli+M.&author=Siciliano+B.&publication+year=2019&journal=Acta+Polytech+Hung&volume=16&pages=79-98\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Portable+dVRK%3A+An+augmented+V-REP+simulator+of+da+vinci+research+kit&author=Fontanelli+G.+A.&author=Selvaggio+M.&author=Ferro+M.&author=Ficuciello+F.&author=Vendittelli+M.&author=Siciliano+B.&publication+year=2019&journal=Acta+Polytech+Hung&volume=16&pages=79-98",openUrlParams:{genre:e,atitle:fo,jtitle:fp,title:fp,volume:de,artnum:"65f99c3d442f7c0001a34857",spage:ec,epage:"98",date:k,sid:d,aulast:a,aufirst:a,doi:a,au:a},innerRefId:"r150",title:fo,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref151",displayNumber:"[151]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGhafoor\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDai\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDuffy\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EStiffness modeling of the soft-finger contact in robotic grasping\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EJ Mech Design\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E126\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E646\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E656\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2004\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Stiffness modeling of the soft-finger contact in robotic grasping' href=https:\u002F\u002Fdx.doi.org\u002F10.1115\u002F1.1758255\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' 
target='_blank' aria-label='Google Scholar link for Stiffness modeling of the soft-finger contact in robotic grasping' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Stiffness+modeling+of+the+soft-finger+contact+in+robotic+grasping&author=Ghafoor+A.&author=Dai+J.+S.&author=Duffy+J.&publication+year=2004&journal=J+Mech+Design&volume=126&doi=10.1115%2F1.1758255&pages=646-656\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Stiffness+modeling+of+the+soft-finger+contact+in+robotic+grasping&author=Ghafoor+A.&author=Dai+J.+S.&author=Duffy+J.&publication+year=2004&journal=J+Mech+Design&volume=126&doi=10.1115%2F1.1758255&pages=646-656",openUrlParams:{genre:e,atitle:fq,jtitle:fr,title:fr,volume:"126",artnum:"65f99c3d442f7c0001a34858",spage:"646",epage:"656",date:W,sid:d,aulast:a,aufirst:a,doi:fs,au:a},innerRefId:"r151",title:fq,doi:fs,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1115\u002F1.1758255",pubMedLink:a}]},{id:"ref152",displayNumber:"[152]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESallam\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFontanelli\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG. 
A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGallo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELa Rocca\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDi Spiezio Sardo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELongo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EN.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPrototype realization of a human hand-inspired needle driver for robotic-assisted surgery\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Trans Med Robot Bio\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E5\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E4\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E843\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E856\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTMRB.2023.3309942\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Prototype+realization+of+a+human+hand-inspired+needle+driver+for+robotic-assisted+surgery&author=Sallam+M.&author=Fontanelli+G.+A.&author=Gallo+A.&author=La+Rocca+R.&author=Di+Spiezio+Sardo+A.&author=Longo+N.&author=Ficuciello+F.&publication+year=2023&journal=IEEE+Trans+Med+Robot+Bio&volume=5&doi=10.1109%2FTMRB.2023.3309942&pages=843-856\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Prototype+realization+of+a+human+hand-inspired+needle+driver+for+robotic-assisted+surgery&author=Sallam+M.&author=Fontanelli+G.+A.&author=Gallo+A.&author=La+Rocca+R.&author=Di+Spiezio+Sardo+A.&author=Longo+N.&author=Ficuciello+F.&publication+year=2023&journal=IEEE+Trans+Med+Robot+Bio&volume=5&doi=10.1109%2FTMRB.2023.3309942&pages=843-856",openUrlParams:{genre:e,atitle:ft,jtitle:fu,title:fu,volume:J,artnum:"65f99c3d442f7c0001a34859",spage:"843",epage:"856",date:q,sid:d,aulast:a,aufirst:a,doi:fv,au:a},innerRefId:"r152",title:ft,doi:fv,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FTMRB.2023.3309942",pubMedLink:a}]},{id:"ref153",displayNumber:"[153]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECoevoet\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EE.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EAdagolodjo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELin\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDuriez\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and 
\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EPlanning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EIEEE Robot Auto Lett\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E7\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E2\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E4853\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E4860\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2022\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate' href=https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2022.3152322\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Planning+of+soft-rigid+hybrid+arms+in+contact+with+compliant+environment%3A+Application+to+the+transrectal+biopsy+of+the+prostate&author=Coevoet+E.&author=Adagolodjo+Y.&author=Lin+M.&author=Duriez+C.&author=Ficuciello+F.&publication+year=2022&journal=IEEE+Robot+Auto+Lett&volume=7&doi=10.1109%2FLRA.2022.3152322&pages=4853-4860\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Planning+of+soft-rigid+hybrid+arms+in+contact+with+compliant+environment%3A+Application+to+the+transrectal+biopsy+of+the+prostate&author=Coevoet+E.&author=Adagolodjo+Y.&author=Lin+M.&author=Duriez+C.&author=Ficuciello+F.&publication+year=2022&journal=IEEE+Robot+Auto+Lett&volume=7&doi=10.1109%2FLRA.2022.3152322&pages=4853-4860",openUrlParams:{genre:e,atitle:fw,jtitle:i,title:i,volume:eI,artnum:"65f99c3d442f7c0001a3485a",spage:"4853",epage:"4860",date:j,sid:d,aulast:a,aufirst:a,doi:fx,au:a},innerRefId:"r153",title:fw,doi:fx,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1109\u002FLRA.2022.3152322",pubMedLink:a}]},{id:"ref154",displayNumber:"[154]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECanbay\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFerrentino\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EP.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELiu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMoccia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ER.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EPirozzi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESiciliano\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan 
class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ECalibration of Tactile\u002FForce Sensors for Grasping with the PRISMA Hand II\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EIEEE\u002FASME International Conference on Advanced Intelligent Mechatronics (AIM)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E) pp. \u003Cspan class=\"fpage\"\u003E442\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E447\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Calibration of Tactile\u002FForce Sensors for Grasping with the PRISMA Hand II' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Calibration+of+Tactile%2FForce+Sensors+for+Grasping+with+the+PRISMA+Hand+II&author=Canbay+D.&author=Ferrentino+P.&author=Liu+H.&author=Moccia+R.&author=Pirozzi+S.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2021\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Calibration+of+Tactile%2FForce+Sensors+for+Grasping+with+the+PRISMA+Hand+II&author=Canbay+D.&author=Ferrentino+P.&author=Liu+H.&author=Moccia+R.&author=Pirozzi+S.&author=Siciliano+B.&author=Ficuciello+F.&publication+year=2021",openUrlParams:{genre:h,date:t,sid:d,title:fy},innerRefId:"r154",title:fy,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref155",displayNumber:"[155]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELeccia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESallam\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan 
class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGrazioso\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaporaso\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ET.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDi Gironimo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFicuciello\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EDevelopment and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EEng Appl Artif Intel\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E121\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E105853\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Development and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.engappai.2023.105853\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Development and testing of a virtual simulator for a myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability' 
href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Development+and+testing+of+a+virtual+simulator+for+a+myoelectric+prosthesis+prototype+%E2%80%93+the+prisma+hand+ii+%E2%80%93+to+improve+its+usability+and+acceptability&author=Leccia+A.&author=Sallam+M.&author=Grazioso+S.&author=Caporaso+T.&author=Di+Gironimo+G.&author=Ficuciello+F.&publication+year=2023&journal=Eng+Appl+Artif+Intel&volume=121&doi=10.1016%2Fj.engappai.2023.105853\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Development+and+testing+of+a+virtual+simulator+for+a+myoelectric+prosthesis+prototype+%E2%80%93+the+prisma+hand+ii+%E2%80%93+to+improve+its+usability+and+acceptability&author=Leccia+A.&author=Sallam+M.&author=Grazioso+S.&author=Caporaso+T.&author=Di+Gironimo+G.&author=Ficuciello+F.&publication+year=2023&journal=Eng+Appl+Artif+Intel&volume=121&doi=10.1016%2Fj.engappai.2023.105853",openUrlParams:{genre:e,atitle:fz,jtitle:fA,title:fA,volume:"121",artnum:"65f99c3d442f7c0001a3485c",spage:"105853",epage:f,date:q,sid:d,aulast:a,aufirst:a,doi:fB,au:a},innerRefId:"r155",title:fz,doi:fB,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.engappai.2023.105853",pubMedLink:a}]},{id:"ref156",displayNumber:"[156]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGong\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESun\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENair\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBidwai\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003EA.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ER.\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGrezmak\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESartoretti\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDaltorio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK. A.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ELegged robots for object manipulation: A review\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EFront Mech Eng\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E9\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Legged robots for object manipulation: A review' href=https:\u002F\u002Fdx.doi.org\u002F10.3389\u002Ffmech.2023.1142421\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Legged robots for object manipulation: A review' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Legged+robots+for+object+manipulation%3A+A+review&author=Gong+Y.&author=Sun+G.&author=Nair+A.&author=Bidwai+A.&author=R.+C.+S.&author=Grezmak+J.&author=Sartoretti+G.&author=Daltorio+K.+A.&publication+year=2023&journal=Front+Mech+Eng&volume=9&doi=10.3389%2Ffmech.2023.1142421\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Legged+robots+for+object+manipulation%3A+A+review&author=Gong+Y.&author=Sun+G.&author=Nair+A.&author=Bidwai+A.&author=R.+C.+S.&author=Grezmak+J.&author=Sartoretti+G.&author=Daltorio+K.+A.&publication+year=2023&journal=Front+Mech+Eng&volume=9&doi=10.3389%2Ffmech.2023.1142421",openUrlParams:{genre:e,atitle:fC,jtitle:fD,title:fD,volume:O,artnum:"65f99c3d442f7c0001a3485d",spage:f,epage:f,date:q,sid:d,aulast:a,aufirst:a,doi:fE,au:a},innerRefId:"r156",title:fC,doi:fE,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.3389\u002Ffmech.2023.1142421",pubMedLink:a}]},{id:"ref157",displayNumber:"[157]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHuang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWu\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EY.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECao\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EQ.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EGuo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ESynthesis of a novel type of metamorphic mechanism module for large scale deployable grasping 
manipulators\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EMech Mach Theory\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E128\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E544\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E559\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2018\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2018.06.017\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Synthesis+of+a+novel+type+of+metamorphic+mechanism+module+for+large+scale+deployable+grasping+manipulators&author=Jia+G.&author=Huang+H.&author=Li+B.&author=Wu+Y.&author=Cao+Q.&author=Guo+H.&publication+year=2018&journal=Mech+Mach+Theory&volume=128&doi=10.1016%2Fj.mechmachtheory.2018.06.017&pages=544-559\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Synthesis+of+a+novel+type+of+metamorphic+mechanism+module+for+large+scale+deployable+grasping+manipulators&author=Jia+G.&author=Huang+H.&author=Li+B.&author=Wu+Y.&author=Cao+Q.&author=Guo+H.&publication+year=2018&journal=Mech+Mach+Theory&volume=128&doi=10.1016%2Fj.mechmachtheory.2018.06.017&pages=544-559",openUrlParams:{genre:e,atitle:fF,jtitle:y,title:y,volume:"128",artnum:"65f99c3d442f7c0001a3485e",spage:"544",epage:"559",date:n,sid:d,aulast:a,aufirst:a,doi:fG,au:a},innerRefId:"r157",title:fF,doi:fG,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2018.06.017",pubMedLink:a}]},{id:"ref158",displayNumber:"[158]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJia\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EG.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHuang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EWang\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ELi\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EB.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EType synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EMech Mach Theory\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E161\u003C\u002Fspan\u003E, \u003Cspan class=\"fpage\"\u003E104330\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2021\u003C\u002Fspan\u003E).\u003Ca class='ref-link' 
target='_blank' aria-label='CrossRef link for Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2021.104330\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Type+synthesis+of+plane-symmetric+deployable+grasping+parallel+mechanisms+using+constraint+force+parallelogram+law&author=Jia+G.&author=Huang+H.&author=Wang+S.&author=Li+B.&publication+year=2021&journal=Mech+Mach+Theory&volume=161&doi=10.1016%2Fj.mechmachtheory.2021.104330\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Type+synthesis+of+plane-symmetric+deployable+grasping+parallel+mechanisms+using+constraint+force+parallelogram+law&author=Jia+G.&author=Huang+H.&author=Wang+S.&author=Li+B.&publication+year=2021&journal=Mech+Mach+Theory&volume=161&doi=10.1016%2Fj.mechmachtheory.2021.104330",openUrlParams:{genre:e,atitle:fH,jtitle:y,title:y,volume:"161",artnum:"65f99c3d442f7c0001a3485f",spage:"104330",epage:f,date:t,sid:d,aulast:a,aufirst:a,doi:fI,au:a},innerRefId:"r158",title:fH,doi:fI,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.mechmachtheory.2021.104330",pubMedLink:a}]},{id:"ref159",displayNumber:"[159]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBellicoso\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC. 
D.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EKrämer\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EK.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EStäuble\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESako\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EJenelten\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EF.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EBjelonic\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHutter\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EAlma - Articulated Locomotion and Manipulation for a Torque-Controllable Robot\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003EInternational Conference on Robotics and Automation (ICRA)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2019\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E8477\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E8483\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Alma - Articulated Locomotion and Manipulation for a Torque-Controllable Robot' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Alma+-+Articulated+Locomotion+and+Manipulation+for+a+Torque-Controllable+Robot&author=Bellicoso+C.+D.&author=Kr%C3%A4mer+K.&author=St%C3%A4uble+M.&author=Sako+D.&author=Jenelten+F.&author=Bjelonic+M.&author=Hutter+M.&publication+year=2019\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Alma+-+Articulated+Locomotion+and+Manipulation+for+a+Torque-Controllable+Robot&author=Bellicoso+C.+D.&author=Kr%C3%A4mer+K.&author=St%C3%A4uble+M.&author=Sako+D.&author=Jenelten+F.&author=Bjelonic+M.&author=Hutter+M.&publication+year=2019",openUrlParams:{genre:h,date:k,sid:d,title:fJ},innerRefId:"r159",title:fJ,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref160",displayNumber:"[160]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EFerrolho\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EH.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EIvan\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EV.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EMerkt\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EW.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EHavoutis\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EI.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EVijayakumar\u003C\u002Fspan\u003E, \u003Cspan 
class=\"given-names\"\u003ES.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003ERoloma: Robust loco-manipulation for quadruped robots with arms\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003EAuton Robot\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E47\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E8\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E1463\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E1481\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Roloma: Robust loco-manipulation for quadruped robots with arms' href=https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-023-10146-0\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Roloma: Robust loco-manipulation for quadruped robots with arms' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Roloma%3A+Robust+loco-manipulation+for+quadruped+robots+with+arms&author=Ferrolho+H.&author=Ivan+V.&author=Merkt+W.&author=Havoutis+I.&author=Vijayakumar+S.&publication+year=2023&journal=Auton+Robot&volume=47&doi=10.1007%2Fs10514-023-10146-0&pages=1463-1481\u003EGoogle 
Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Roloma%3A+Robust+loco-manipulation+for+quadruped+robots+with+arms&author=Ferrolho+H.&author=Ivan+V.&author=Merkt+W.&author=Havoutis+I.&author=Vijayakumar+S.&publication+year=2023&journal=Auton+Robot&volume=47&doi=10.1007%2Fs10514-023-10146-0&pages=1463-1481",openUrlParams:{genre:e,atitle:fK,jtitle:x,title:x,volume:"47",artnum:"65f99c3d442f7c0001a34861",spage:"1463",epage:"1481",date:q,sid:d,aulast:a,aufirst:a,doi:fL,au:a},innerRefId:"r160",title:fK,doi:fL,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1007\u002Fs10514-023-10146-0",pubMedLink:a}]},{id:"ref161",displayNumber:"[161]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECostanzo\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ENatale\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EC.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ESelvaggio\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EM.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EVisual and Haptic Cues for Human-Robot Handover*\u003C\u002Fspan\u003E,” In: \u003Cem class=\"italic\"\u003E32nd IEEE International Conference on Robot and Human Interactive Communication (RO–MAN)\u003C\u002Fem\u003E, (\u003Cspan class=\"year\"\u003E2023\u003C\u002Fspan\u003E) pp. 
\u003Cspan class=\"fpage\"\u003E2677\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E2682\u003C\u002Fspan\u003E.\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Visual and Haptic Cues for Human-Robot Handover*' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Visual+and+Haptic+Cues+for+Human-Robot+Handover*&author=Costanzo+M.&author=Natale+C.&author=Selvaggio+M.&publication+year=2023\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Visual+and+Haptic+Cues+for+Human-Robot+Handover*&author=Costanzo+M.&author=Natale+C.&author=Selvaggio+M.&publication+year=2023",openUrlParams:{genre:h,date:q,sid:d,title:fM},innerRefId:"r161",title:fM,doi:a,crossRefLink:a,pubMedLink:a}]},{id:"ref162",displayNumber:"[162]",existInContent:b,content:"\u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003EDai\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003EJ. S.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E and \u003Cspan class=\"string-name\"\u003E\u003Cspan class=\"surname\"\u003ECaldwell\u003C\u002Fspan\u003E, \u003Cspan class=\"given-names\"\u003ED. 
G.\u003C\u002Fspan\u003E\u003C\u002Fspan\u003E, “\u003Cspan class=\"article-title\"\u003EOrigami-based robotic paper-and-board packaging for food industry\u003C\u002Fspan\u003E,” \u003Cspan class=\"source\"\u003ETrend Food Sci Tech\u003C\u002Fspan\u003E \u003Cspan class=\"volume\"\u003E21\u003C\u002Fspan\u003E(\u003Cspan class=\"issue\"\u003E3\u003C\u002Fspan\u003E), \u003Cspan class=\"fpage\"\u003E153\u003C\u002Fspan\u003E–\u003Cspan class=\"lpage\"\u003E157\u003C\u002Fspan\u003E (\u003Cspan class=\"year\"\u003E2010\u003C\u002Fspan\u003E).\u003Ca class='ref-link' target='_blank' aria-label='CrossRef link for Origami-based robotic paper-and-board packaging for food industry' href=https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.tifs.2009.10.007\u003ECrossRef\u003C\u002Fa\u003E\u003Ca class='ref-link' target='_blank' aria-label='Google Scholar link for Origami-based robotic paper-and-board packaging for food industry' href=https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Origami-based+robotic+paper-and-board+packaging+for+food+industry&author=Dai+J.+S.&author=Caldwell+D.+G.&publication+year=2010&journal=Trend+Food+Sci+Tech&volume=21&doi=10.1016%2Fj.tifs.2009.10.007&pages=153-157\u003EGoogle Scholar\u003C\u002Fa\u003E",item:[{googleScholarLink:"https:\u002F\u002Fscholar.google.com\u002Fscholar_lookup?title=Origami-based+robotic+paper-and-board+packaging+for+food+industry&author=Dai+J.+S.&author=Caldwell+D.+G.&publication+year=2010&journal=Trend+Food+Sci+Tech&volume=21&doi=10.1016%2Fj.tifs.2009.10.007&pages=153-157",openUrlParams:{genre:e,atitle:fN,jtitle:fO,title:fO,volume:ae,artnum:"65f99c3d442f7c0001a34863",spage:"153",epage:au,date:ad,sid:d,aulast:a,aufirst:a,doi:fP,au:a},innerRefId:"r162",title:fN,doi:fP,crossRefLink:"https:\u002F\u002Fdx.doi.org\u002F10.1016\u002Fj.tifs.2009.10.007",pubMedLink:a}]}],figures:[{contentId:"f1",label:"Figure 1.",description:"\u003Cspan class=\"p\"\u003EGraphical representation of the six research areas and sub-areas 
dealt with within the PRISMA Lab at the University of Naples Federico II. This article proposes an overview of the main problems addressed in these fields and discuss potential future directions on the topics.\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-60176-mediumThumb-png-S026357472400033X_fig1.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-79078-optimisedImage-png-S026357472400033X_fig1.jpg",attrib:[]},{contentId:"tblI",label:"Table I.",description:"\u003Cspan class=\"p\"\u003ESummary of PRISMA Lab contributions in the field of dynamic manipulation and locomotion.\u003C\u002Fspan\u003E",thumbnailSrc:fQ,enlargedSrc:fQ,attrib:[]},{contentId:"f2",label:"Figure 2.",description:"\u003Cspan class=\"p\"\u003ETray-based and pushing non-prehensile object manipulation scenarios. Upper row: a robot is tasked with transporting an object placed on a tray-like end-effector along a predefined, fast trajectory while avoiding the relative sliding (a) [20]. The robot performs a linear transporting trajectory while opportunely inclining the tray to improve the robustness of the task performance (b) [19]. Bottom row: an object is pushed by a mobile robot along a trajectory (c) [16]. 
Multiple robots can push an object with minimal effort by optimally placing themself around it (d) [17].\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-36577-mediumThumb-png-S026357472400033X_fig2.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-68785-optimisedImage-png-S026357472400033X_fig2.jpg",attrib:[]},{contentId:"f3",label:"Figure 3.",description:"\u003Cspan class=\"p\"\u003EOn the left, a quadruped robot is connected to a human through a leash. This scenario was tested in the Gazebo simulation environment emulating a guide dog helping a visually impaired person. In the middle, a legged manipulator transports an object placed on a tray-like end-effector while simultaneously preventing it from sliding. On the right, the model behind this task, where the object (red cube) is prevented from sliding by keeping contact forces (blue) inside the friction cones (green).\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-77421-mediumThumb-png-S026357472400033X_fig3.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-91771-optimisedImage-png-S026357472400033X_fig3.jpg",attrib:[]},{contentId:"tblII",label:"Table II.",description:"\u003Cspan class=\"p\"\u003ESummary of PRISMA Lab contributions in the field of aerial robotics.\u003C\u002Fspan\u003E",thumbnailSrc:fR,enlargedSrc:fR,attrib:[]},{contentId:"f4",label:"Figure 4.",description:"\u003Cspan class=\"p\"\u003ETwo unmanned aerial manipulators during non-destructive test measurements. On the left, an aerial vehicle equipped with one arm is measuring the thickness of a wall with an ultrasonic probe. 
On the right, a hybrid drone equipped with a specially developed omnidirectional mobile base that can land on pipelines and then move to position ad-hoc measurement systems for non-destructive test measures.\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-16354-mediumThumb-png-S026357472400033X_fig4.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-93246-optimisedImage-png-S026357472400033X_fig4.jpg",attrib:[]},{contentId:"tblIII",label:"Table III.",description:"\u003Cspan class=\"p\"\u003ESummary of PRISMA Lab contributions in the field of physical human-robot interaction.\u003C\u002Fspan\u003E",thumbnailSrc:fS,enlargedSrc:fS,attrib:[]},{contentId:"f5",label:"Figure 5.",description:"\u003Cspan class=\"p\"\u003E(a) cognitive control framework compatible with AI methods for planning, reasoning, and learning; (b) task orchestration and situated interpretation of ambiguous human gestures; (c) kinesthetic teaching of structured tasks; combined task and motion plans (d); human-robot collaboration during the execution of a shared task (e).\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-43987-mediumThumb-png-S026357472400033X_fig5.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-02137-optimisedImage-png-S026357472400033X_fig5.jpg",attrib:[]},{contentId:"f6",label:"Figure 6.",description:"\u003Cspan class=\"p\"\u003EA shared control telerobotic system consists of a local device used to jointly send partial commands and receive computed haptic information as feedback from the remote side. 
The user usually observes the remote environment by means of a camera that provides a limited awareness of the scene. In (a), the robot must execute a remote object grasping task [91]. In this case, provided haptic information aims to increase the situational awareness of the operator informing about the proximity to the robot’s joint limits and singularities. In (b) and (c), vision-based or programmed virtual fixtures aid the execution of the task in industrial and surgical robotic settings, respectively [92, 93]. In (d), a non-prehensile object transportation scenario is considered and haptic feedback is provided about the proximity to the sliding conditions of the object placed on the tray [19].\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-82955-mediumThumb-png-S026357472400033X_fig6.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-77865-optimisedImage-png-S026357472400033X_fig6.jpg",attrib:[]},{contentId:"tblIV",label:"Table IV.",description:"\u003Cspan class=\"p\"\u003ESummary of PRISMA Lab contributions in the field of AI and cognitive robotics.\u003C\u002Fspan\u003E",thumbnailSrc:fT,enlargedSrc:fT,attrib:[]},{contentId:"tblV",label:"Table V.",description:"\u003Cspan class=\"p\"\u003ESummary of PRISMA Lab contributions in the field of industrial robotics.\u003C\u002Fspan\u003E",thumbnailSrc:fU,enlargedSrc:fU,attrib:[]},{contentId:"f7",label:"Figure 7.",description:"\u003Cspan class=\"p\"\u003EOverall picture of the logistic scenario including an abstract representation of vision-based recognition and localization algorithm (left), snapshot of the robotic depalletizing cell (right) with highlighted detail of the gripping tool (red 
window).\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-29125-mediumThumb-png-S026357472400033X_fig7.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-72110-optimisedImage-png-S026357472400033X_fig7.jpg",attrib:[]},{contentId:"tblVI",label:"Table VI.",description:"\u003Cspan class=\"p\"\u003ESummary of PRISMA Lab contributions in the field of medical robotics.\u003C\u002Fspan\u003E",thumbnailSrc:fV,enlargedSrc:fV,attrib:[]},{contentId:"f8",label:"Figure 8.",description:"\u003Cspan class=\"p\"\u003ELeft: a marker-less method tracks surgical tools, establishing VF geometry resembling to a cylinder with its central axis aligned with the instrument’s axis [144]; right: the MUSHA Hand II surgical tool, integrated on the dVRK robot [145–147].\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-90309-mediumThumb-png-S026357472400033X_fig8.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-63707-optimisedImage-png-S026357472400033X_fig8.jpg",attrib:[]},{contentId:"f9",label:"Figure 9.",description:"\u003Cspan class=\"p\"\u003EThe PRISMA Hand II and its capabilities. 
The grasping options are categorized into three sets: (a) lateral grasps, (b) pinches, and (c) power grasps [154, 155].\u003C\u002Fspan\u003E",thumbnailSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-38486-mediumThumb-png-S026357472400033X_fig9.jpg",enlargedSrc:"https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary-alt:20240319140930-72669-optimisedImage-png-S026357472400033X_fig9.jpg",attrib:[]}]},pdf:{standardResolution:{fileUrl:"\u002Fcore\u002Fservices\u002Faop-cambridge-core\u002Fcontent\u002Fview\u002F5DA1E6B0701411F71E5FFC40F2E53847\u002FS026357472400033Xa.pdf\u002Frobotics-goes-prisma.pdf",fileSizeInMb:p,articleTitle:K,slugTitle:"robotics-goes-prisma"},highResolution:f,media:f},classification:[],supplementaryMaterials:[],relations:{corrections:[],correctionsOriginals:[],retractions:[],retractionsOriginals:[],addendums:[],addendumsOriginals:[],hasAnyRelations:c,hasAnyOriginalArticle:c},settings:{hasAccess:b,isOpenAccess:c,displayRightsLink:b,shouldDisplayCrossMark:c,shouldDisplayNasaAds:c,suppressPdf:c,isShareable:b,isAnnotationsEnabled:c,disableArticleCommentary:c,displayArticleCommentaryAsDiscussionLinks:c,isCommentsEnabled:c,hasContent:b,shouldDisplaySubmitContent:c,isResearchDirections:c,isQuestionCollection:c,isMathjaxEnabled:b},citationCount:V,openUrlParams:"?genre=article&atitle=Robotics%20goes%20PRISMA&jtitle=Robotica&title=Robotica&spage=1&epage=28&sid=https%3A%2F%2Fwww.cambridge.org%2Fcore&aulast=Selvaggio&aufirst=Mario&doi=10.1017\u002FS026357472400033X",ecommerceProducts:{digitalSku:F,paperBackSku:f,hardBackSku:f},subject:[],permissionUrl:"https:\u002F\u002Fs100.copyright.com\u002FAppDispatchServlet?publisherName=CUP&publication=ROB&title=Robotics%20goes%20PRISMA&publicationDate=20%20March%202024&author=Mario%20Selvaggio%2C%20Rocco%20Moccia%2C%20Pierluigi%20Arpenti%2C%20Riccardo%20Caccavale%2C%20Fabio%20Ruggiero%2C%20
Jonathan%20Cacace%2C%20Fanny%20Ficuciello%2C%20Alberto%20Finzi%2C%20Vincenzo%20Lippiello%2C%20Luigi%20Villani%2C%20Bruno%20Siciliano©right=%C2%A9%20The%20Author(s)%2C%202024.%20Published%20by%20Cambridge%20University%20Press&contentID=10.1017%2FS026357472400033X&startPage=1&endPage=28&orderBeanReset=True&volumeNum=&issueNum=&oa="},breadcrumbs:[{name:"Home",url:"\u002Fcore"},{name:"Journals",url:"\u002Fcore\u002Fpublications\u002Fjournals"},{name:L,url:ay},{name:"FirstView",url:az},{name:K}],lang:aA,isShare:c,coreCmsConfig:{shouldUseShareProductTool:b,shouldUseHypothesis:b,isUnsiloEnabled:b},debugHostName:"page-component-586b7cd67f-gb8f7",debugTotalLoadingTime:V,debugRenderDate:"2024-11-23T19:20:22.433Z",debugHasDataIssue:c,debugHasContentIssue:c}],fetch:{},error:f,state:{errors:{hasAnyIssue:c,hasContentIssue:c},tabs:{supportedTabs:[],currentTab:a,stickyTabsEnabled:c},unleash:{repo:[{name:"useOrcidAuthorSync",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableLogObfuscation",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.useDefaultDigitalCopy",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"wayf.newGlobalHeader.showDiscoveryTool",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CorePageComponentUseNewCombobox",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.newCitationApi",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.loadSupMatFolderMetrics",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableJmsMessagesPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UseSocialShareButton",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UseBlog",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.useStudyLevels",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"useNewGlobalHeader",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"wayf.newGlobalHeader",enabled:b,variant:{name:g,enabled:c},impressionD
ata:c},{name:"authentication.useNewGlobalHeader",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.platformHeader-micro-ui",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"DT.newGlobalHeader",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"checkout.useNewGlobalHeaderUrl",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"checkout.newGlobalHeader.showDiscoveryTool",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"showSearchDashboardVersionAndPublishedDate",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CorePageComponentGetUserInfoFromSharedSession",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UseB2bPages",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreDisplayKBARTAutomation",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.useQuickAccessPanel",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreDisplayResearchOpen",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableInternalJournalsSqsPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"sendEmailsToTargetRecipients",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableFirstProcessReminderPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"he.resourcesWidgetLandingPage",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.useNewTitlesSections",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableContinueProcessReminderPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreUseCitationToolApi",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableSecondProcessReminderPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreUseKbartMetafile",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"shoppingCart.newGlobalHeader.showDiscoveryTool",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableInternalArticle
sSqsPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreDisplayGlobalHeader",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"checkout.newGlobalHeader",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UseKeycloakAuth",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"Ecommerce",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UseGroupBooking",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreUseOmnichannel",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableZendeskRedirects",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnablePostPaymentEnrolment",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"authentication.useNewGlobalHeader.showDiscoveryTool",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"DisableEduframe",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"DT.useUsageEvents",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"showSearchDashboardSortDropdown",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"car-sup-mats",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.useFixedLoginUrl",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UsePayPalPayment",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.NewReadAsGuestPopup",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"cart.useNewGlobalHeaderUrl",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UseCourseWelcomeEmail",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.Omnichannel",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"EnableInternalArticlesPostProcessingSqsPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"homsy.Omnichannel",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.verifySalesforceApiKey",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"UsePayByQuote",enabled:b,variant:
{name:g,enabled:c},impressionData:c},{name:"EnableEmailMessagesSqsPolling",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"HE.useRedesignedPill",enabled:b,variant:{name:g,enabled:c},impressionData:c},{name:"CoreUseNewCms",enabled:b,variant:{name:g,enabled:c},impressionData:c}]},i18n:{routeParams:{}}},serverRendered:b,routePath:"\u002Fcore\u002Fjournals\u002FjournalName\u002Farticle\u002FarticleName\u002F5DA1E6B0701411F71E5FFC40F2E53847",config:{assetsService:"https:\u002F\u002Fstatic.cambridge.org",publicApi:"https:\u002F\u002Fwww.cambridge.org\u002Fcore\u002Fpage-component\u002Fapi\u002Fv2",domainUrl:d,shibbolethUrl:"https:\u002F\u002Fshibboleth.cambridge.org",seamlessAccessPs:"https:\u002F\u002Fservice.seamlessaccess.org\u002Fps\u002F",seamlessAccessContext:"seamlessaccess.org",shareaholicScript:"https:\u002F\u002Fcdn.shareaholic.net\u002Fassets\u002Fpub\u002Fshareaholic.js",shareaholicSiteId:"b60ec523a5bee2ad04c630bf0d3aa388",shareaholicAppId:33113081,unleashProxyUrl:"https:\u002F\u002Fcore-features.cambridge.org\u002Fproxy",unleashProxyClientKey:"B43LrdgqNKlNsaHfVzQ7l78gkVH0K7tf",cloudflareAuth:a,_app:{basePath:"\u002F",assetsPath:"\u002Fcore\u002Fpage-component\u002F",cdnURL:f}}}}("",true,false,"https:\u002F\u002Fwww.cambridge.org","article",null,"disabled","unknown","IEEE Robot Auto Lett","2022","2019","2020","IEEE Trans Robot","2018","chapter",1,"2023","Robot Auton Syst","IEEE Robot Autom Lett","2021","Department of Electrical Engineering and Information Technology, University of Naples Federico II, Naples, Italy","2016","Int J Robot Res","Auton Robot","Mech Mach Theory","3","2017","Springer","2015","2014","1","S026357472400033X","2008","IFAC-PapersOnLine","6","5","Robotics goes PRISMA","Robotica","article_rob_ind_pur","38","9","IEEE Trans Contr Syst Tech","2013","IEEE Robot Autom Mag","Drones","Psychol Rev","IEEE\u002FASME Trans Mech",0,"2004","Proceeding IFAC Triennal World Congress","2002","IEEE Robotics & Automation 
Magazine","2007","8","Robot Dynamic Manipulation. Perception of Deformable Objects and Nonprehensile Manipulation Control","Informatics in Control, Automation and Robotics, Lecture Notes in Electrical Engineering","Unmanned Aerial Vehicles and Micro Aerial Vehicles","2010","21","book","Advances in Unmanned Aerial Vehicles: State of the Art and the Road to Autonomy volume 33 of Intelligent Systems, Control and Automation: Science and Engineering","Handbook of Unmanned Aerial Vehicles","Aerial Robotic Systems Physically Interacting with the Environment (AIRPHARO)","2","Human-Robot Interaction in Social Robotics","AAAI Mobile Robot Competition","IEEE International Workshop on Robot and Human Interactive Communication (ROMAN)","2005","Multi-Robot Systems. From Swarms to Intelligent Automata","Robotics Science and Systems","ECAI","ICAPS-2015","Human-Friendly Robotics 2020: 13th International Workshop","AIxIA 2022–Advances in Artificial Intelligence: XXIst International Conference of the Italian Association for Artificial Intelligence, AIxIA 2022","157","Elemento Terminale Per Dispositivi Di Presa Per Interventi Chirurgici, in Particolare Interventi a Minima Invasività","9D09B9EC51F7B2908BE018873570F515","robotica","\u002Fcore\u002Fjournals\u002Frobotica","\u002Fcore\u002Fjournals\u002Frobotica\u002Ffirstview","en","A survey of robot manipulation in contact","10.1016\u002Fj.robot.2022.104224","Dynamic Legged Manipulation of a Ball Through Multi-Contact Optimization","Control of nonprehensile planar rolling manipulation: A passivity-based approach","35","317","329","10.1109\u002FTRO.2018.2887356","Object rearrangement through planar pushing: A theoretical analysis and validation","IEEE T Robot","10.1109\u002FTRO.2022.3153785","Coordinate-Free Framework for Robotic Pizza Tossing and Catching","Nonprehensile dynamic manipulation: A survey","10.1109\u002FLRA.2018.2801939","Forces acting on a biped robot. 
center of pressure-zero moment point","IEEE Trans Syst, Man, Cyber - Part A: Syst Humans","34","10.1109\u002FTSMCA.2004.832811","Review and descriptive investigation of the connection between bipedal locomotion and non-prehensile manipulation","Annu Rev Control","53","51","69","10.1016\u002Fj.arcontrol.2022.04.009","Passive dynamic walking","62","10.1177\u002F027836499000900206","Kinetic Energy Shaping for Gait Regulation of Underactuated Bipeds","30","40","Novel method for preventing shin-collisions in six-legged robots by utilising a robot–terrain interference model","151","10.1016\u002Fj.mechmachtheory.2020.103897","Contact Model Fusion for Event-Based Locomotion in Unstructured Terrains","Passive whole-body control for quadruped robots: Experimental validation over challenging terrain","4","10.1109\u002FLRA.2019.2908502","Linear Time-Varying mpc for Nonprehensile Object Manipulation with a Nonholonomic Mobile Robot","Task-Oriented Contact Optimization for Pushing Manipulation with Mobile Robots","Nonprehensile Object Transportation with a Legged Manipulator","A shared-control teleoperation architecture for nonprehensile object transportation","10.1109\u002FTRO.2021.3086773","Non-prehensile object transportation via model predictive non-sliding manipulation control","IEEE Trans Contr Syst T","31","10.1109\u002FTCST.2023.3277224","A non-prehensile object transportation framework with adaptive tilting based on quadratic programming","10.1109\u002FLRA.2023.3268594","Passivity-based control for a rolling-balancing system: The nonprehensile disk-on-disk","25","10.1109\u002FTCST.2016.2637719","A new laparoscopic tool with in-hand rolling capabilities for needle reorientation","10.1109\u002FLRA.2018.2809443","Nonprehensile manipulation of an underactuated mechanical system with second-order nonholonomic constraints: The robotic hula-hoop","10.1109\u002FLRA.2018.2792403","Hybrid visual servoing with hierarchical task composition for aerial 
manipulation","10.1109\u002FLRA.2015.2510749","Control of nonprehensile rolling manipulation: Balancing a disk on a disk","10.1109\u002FTRO.2013.2262775","On the Experiments about the Nonprehensile Reconfiguration of a Rolling Sphere on a Plate","Cosed-Loop Control of a Nonprehensile Manipulation System Inspired by a Pizza-Peel Mechanism","10.1007\u002F978-3-030-93290-9_9","Nonprehensile manipulation of deformable objects: Achievements and perspectives from the roDyMan project","10.1109\u002FMRA.2017.2781306","Energy Pumping-and-Damping for Gait Robustification of Underactuated Planar Biped Robots Within the Hybrid Zero Dynamics Framework","Uniform global exponential stabilizing passivity-based tracking controller applied to planar biped robots","Interconnection and Damping Assignment Passivity-Based Control for Gait Generation in Underactuated Compass-like Robots","A constructive methodology for the IDA-PBC of underactuated 2-doF mechanical systems with explicit solution of PDEs","Int J Control, Auto Syst","297","10.1007\u002Fs12555-020-0839-1","Tethering a Human with a Quadruped Robot: A Guide Dog to Help Visually Impaired People","Disturbance Rejection for Legged Robots Through a Hybrid Observer","Whole-body control with disturbance rejection through a momentum-based observer for quadruped robots","10.1016\u002Fj.mechmachtheory.2021.104412","The effect of shapes in input-state linearization for stabilization of nonprehensile planar rolling dynamic manipulation","10.1109\u002FLRA.2016.2519147","Keep it upright: Model predictive control for nonprehensile object transportation with obstacle avoidance on a mobile manipulator","10.1109\u002FLRA.2023.3324520","A Solution to Slosh-Free Robot Trajectory Optimization","Shared Autonomy Control for Slosh-Free Teleoperation","Push-manipulation of complex passive mobile objects using experimentally acquired motion models","10.1007\u002Fs10514-014-9414-z","A model predictive approach for online mobile manipulation of 
non-holonomic objects using learned dynamics","10.1177\u002F0278364921992793","Robust trajectory tracking for a scale model autonomous helicopter","Int J Robust Nonlin","14","10.1002\u002Frnc.931","Aerial Grasping of a Moving Target with a Quadrotor UAV","Experimental Validation of a New Adaptive Control Scheme for Quadrotors MAVs","Adaptive control of quadrotor UAVs: A design trade study with flight evaluations","10.1109\u002FTCST.2012.2200104","Adaptive position tracking of VTOL UAVs","2011","10.1109\u002FTRO.2010.2092870","A Nonlinear Force Observer for Quadrotors and Application to Physical Interactive Tasks","Passivity-based adaptive attitude control of a rigid spacecraft","IEEE Trans Automat Contr","10.1109\u002F9.286266","Passivity-based adaptive backstepping control of quadrotor-type UAVs","10.1016\u002Fj.robot.2014.03.019","10.1007\u002F978-90-481-9707-1","Past, present, and future of aerial robotic manipulators","10.1109\u002FTRO.2021.3084395","Aerial manipulation: A literature review","10.1109\u002FLRA.2018.2808541","Passivity-based control of vtol uavs with a momentum-based estimator of external wrench and unmodeled dynamics","10.1016\u002Fj.robot.2015.05.006","Active disturbance rejection control for the robust flight of a passively tilted hexarotor","Globally attractive hyperbolic control for the robust flight of an actively tilting quadrotor","Emergency Landing for a Quadrotor in Case of a Propeller Failure: A Backstepping Approach","Emergency Landing for a Quadrotor in Case of a Propeller Failure: A PID Based Approach","Modelling and identification methods for simulation of cable-suspended dual-arm robotic systems","10.1016\u002Fj.robot.2024.104643","A Multilayer Control for Multirotor uavs Equipped with a Servo Robot Arm","A novel articulated rover for industrial pipes inspection tasks","A Hardware-in-the- Loop Simulator for Physical Human-Aerial Manipulator Cooperation","Image-based visual-impedance control of a dual-arm aerial 
manipulator","10.1109\u002FLRA.2018.2806091","Development of a Control Framework to Autonomously Install Clip Bird Diverters on High-Voltage Lines","Autonomy in physical human-robot interaction: A brief survey","10.1109\u002FLRA.2021.3100603","A hierarchical human-robot interaction-planning framework for task allocation in collaborative industrial assembly processes","10.1109\u002FLRA.2016.2535907","Combining human guidance and structured task execution during physical human–robot collaboration","J Intell Manuf","10.1007\u002Fs10845-022-01989-y","Interactive plan execution during human-robot cooperative manipulation","10.1016\u002Fj.ifacol.2018.11.584","Human-robot interaction: A survey","Found Trends® Human-Comp Inter","10.1561\u002F1100000005","Medical robotics 2014;regulatory, ethical, and legal considerations for increasing levels of autonomy","Sci Robot","10.1126\u002Fscirobotics.aam8638","10.1201\u002Fb13004","Shared autonomy–learning of joint action and human-robot collaboration","Front Neurorobotics","16","10.3389\u002Ffnbot.2019.00016","33","37","Sliding Autonomy for Peer-to-Peer Human-Robot Teams","2000","Adjustable Control Autonomy for Manned Space Flight","Semi-Autonomous Stability Control and Hazard Avoidance for Manned and Unmanned Ground Vehicles","Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation","A Policy-Blending Formalism for Shared Control","10.1177\u002F0278364913490324","Adaptive virtual fixtures for machine-assisted teleoperation tasks","Characterizing Efficiency of Human Robot Interaction: A Case Study of Shared-Control teleoperation","Haptic-based shared-control methods for a dual-arm system","10.1109\u002FLRA.2018.2864353","Passive task-prioritized shared-control teleoperation with haptic guidance","Passive virtual fixtures adaptation in minimally invasive robotic surgery","10.1109\u002FLRA.2018.2849876","Enhancing bilateral teleoperation using camera-based online virtual fixtures generation","Haptic-guided shared control 
for needle grasping optimization in minimally invasive robotic surgery","Human-robot interaction review: Challenges and solutions for modern industrial environments","IEEE Access","10.1109\u002FACCESS.2021.3099287","Everyday activities","Know rob 2.0–a 2nd Generation Knowledge Processing Framework for Cognition-Enabled Robotic Agents","Artificial cognition for social human–robot interaction: An implementation","Artif Intell","10.1016\u002Fj.artint.2016.07.002","Reconfigurable Behavior Trees: Towards an Executive framework meeting high-level decision making and control layer features","Model-based control architecture for attentive robots in rescue scenarios","24","87","10.1007\u002Fs10514-007-9055-6","Conflict monitoring and cognitive control","2001","10.1037\u002F0033-295X.108.3.624","Contention scheduling and the control of routine activities","Cogn Neuropsychol","10.1080\u002F026432900380427","Hierarchical schemas and goals in the control of sequential behavior","10.1037\u002F0033-295X.113.4.887","Learning Object Manipulation Skills via Approximate State Estimation from Real Videos","A Survey on Semantic-Based Methods for the Understanding of Human Movements","10.1016\u002Fj.robot.2019.05.013","Combining task and motion planning: Challenges and guidelines","Front Robot AI","10.3389\u002Ffrobt.2021.637888","Attentional Multimodal Interface for Multidrone Search in the Alps","Attentional Supervision of Human-Robot Collaborative Plans","Plan Execution and Attentional Regulations for Flexible Human-Robot Interaction","Flexible task execution and attentional regulations in human-robot interaction","IEEE Trans Cogn Develp Syst","79","10.1109\u002FTCDS.2016.2614690","10.1007\u002F978-3-030-71356-0_4","A robotic cognitive control framework for collaborative task execution and learning","Top Cogn Sci","10.1111\u002Ftops.12587","Attentional Regulations in a Situated Human-Robot Dialogue","Learning attentional regulations for structured tasks execution in robotic cognitive 
control","43","10.1007\u002Fs10514-019-09876-x","Kinesthetic teaching and attentional supervision of structured tasks in human–robot interaction","10.1007\u002Fs10514-018-9706-9","Imitation Learning and Attentional Supervision of Dual-Arm Structured Tasks","A multi-robot deep Q-learning framework for priority-based sanitization of railway stations","Appl Intell","10.1007\u002Fs10489-023-04529-0","A rapidly-exploring random trees approach to combined task and motion planning","10.1016\u002Fj.robot.2022.104238","Attention to action: Willed and automatic control of behavior","Robotics-Logistics: Challenges for Automation of Logistic Processes","Real-time planning robotic palletizing tasks using reusable roadmaps","J Robot, Network Art Life","10.2991\u002Fjrnal.k.200222.009","Ai-Based Learning Approach with Consideration of Safety Criteria on Example of a Depalletization Robot","High-Speed and Compact Depalletizing Robot Capable of Handling Packages Stacked Complicatedly","Rgb-d object detection and semantic segmentation for autonomous manipulation in clutter","10.1177\u002F0278364917713117","An Efficient Depalletizing System Based on 2d Range Imagery","The next step in robot commissioning: Autonomous picking and palletizing","10.1109\u002FLRA.2016.2519944","Suction pad unit using a bellows pneumatic actuator as a support mechanism for an end effector of depalletizing robots","ROBOMECH Journal","7","10.1186\u002Fs40648-019-0151-0","Application for Automatic Programming of Palletizing Robots","A flexible robotic depalletizing system for supermarket logistics","10.1109\u002FLRA.2020.3000427","Rgb-d recognition and localization of cases for robotic depalletizing in supermarkets","IEEE Robotics and Automation Letters","10.1109\u002FLRA.2020.3013936","A reconfigurable gripper for robotic autonomous depalletizing in supermarket logistics","10.1109\u002FLRA.2020.3003283","Active constraints\u002FVirtual fixtures: A survay","10.1109\u002FTRO.2013.2283410","Spatial motion 
constraints using virtual fixtures generated by anatomy","10.1109\u002FTRO.2006.886838","Dynamic active constraints for surgical robots using vector field inequalities","10.1109\u002FTRO.2019.2920078","Control Barrier Functions: Theory and Applications","A surgical palpation probe with 6-axis force\u002Ftorque sensing capability for minimally invasive surgery","IEEE Trans Ind Electron","10.1109\u002FTIE.2017.2739681","A laparoscopic grasping tool with force sensing capability","Adaptive synergies for the design and control of the pisa\u002Fiit softhand","10.1177\u002F0278364913518998","The softhand pro-h: A hybrid body-controlled, electrically powered hand prosthesis for daily living and working","10.1109\u002FMRA.2017.2751662","Vision-Based Virtual Fixtures Generation for Robotic-Assisted Polyp Dissection Procedures","Vision-based dynamic virtual fixtures for tools collision avoidance in robotic surgery","10.1109\u002FLRA.2020.2969941","The MUSHA hand II: A multi-functional hand for robot-assisted laparoscopic surgery","The musha underactuated hand for robot-aided minimally invasive surgery","Int J Med Robot Comp Assis Surg","10.1002\u002Frcs.1981","Autonomous Endoscope Control Algorithm with Visibility and Joint Limits Avoidance Constraints for Da Vinci Research kit robot","A Portable Da Vinci Simulator in Virtual Reality","Portable dVRK: An augmented V-REP simulator of da vinci research kit","Acta Polytech Hung","Stiffness modeling of the soft-finger contact in robotic grasping","J Mech Design","10.1115\u002F1.1758255","Prototype realization of a human hand-inspired needle driver for robotic-assisted surgery","IEEE Trans Med Robot Bio","10.1109\u002FTMRB.2023.3309942","Planning of soft-rigid hybrid arms in contact with compliant environment: Application to the transrectal biopsy of the prostate","10.1109\u002FLRA.2022.3152322","Calibration of Tactile\u002FForce Sensors for Grasping with the PRISMA Hand II","Development and testing of a virtual simulator for a 
myoelectric prosthesis prototype – the prisma hand ii – to improve its usability and acceptability","Eng Appl Artif Intel","10.1016\u002Fj.engappai.2023.105853","Legged robots for object manipulation: A review","Front Mech Eng","10.3389\u002Ffmech.2023.1142421","Synthesis of a novel type of metamorphic mechanism module for large scale deployable grasping manipulators","10.1016\u002Fj.mechmachtheory.2018.06.017","Type synthesis of plane-symmetric deployable grasping parallel mechanisms using constraint force parallelogram law","10.1016\u002Fj.mechmachtheory.2021.104330","Alma - Articulated Locomotion and Manipulation for a Torque-Controllable Robot","Roloma: Robust loco-manipulation for quadruped robots with arms","10.1007\u002Fs10514-023-10146-0","Visual and Haptic Cues for Human-Robot Handover*","Origami-based robotic paper-and-board packaging for food industry","Trend Food Sci Tech","10.1016\u002Fj.tifs.2009.10.007","https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab1.png","https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab2.png","https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab3.png","https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab4.png","https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab5.png","https:\u002F\u002Fstatic.cambridge.org\u002Fbinary\u002Fversion\u002Fid\u002Furn:cambridge.org:id:binary:20240319140752866-0168:S026357472400033X:S026357472400033X_tab6.png"));</script><script
 src="/core/page-component/06cd607.js" defer></script><script src="/core/page-component/20147ec.js" defer></script><script src="/core/page-component/722ace3.js" defer></script><script src="/core/page-component/6f4302c.js" defer></script> </div> </div> <div id="article-new-home-productCitations" class="product-citations-modal reveal-modal medium" data-reveal role="dialog" aria-labelledby="article-new-home-citedByModalHeader"> <div class="header"> <div class="heading_07 margin-bottom" id="article-new-home-citedByModalHeader">Cited by</div> </div> <div class="citation-content"> <div class="row collapse header margin-top productCitations-content"> <ul class="small-12 small-centered columns citations"> <li class="fade-in section-container active"> <a href="#" class="section-button" data-id="article-new-home-crossref-citations"> <div class="circular medium citation"> <img src="https://assets.crossref.org/logo/crossref-logo-100.png" alt="Crossref logo"> <span class="citation-count">0</span> </div> </a> </li> <li class="fade-in section-container"> <a href="#" class="section-button" data-id="article-new-home-scholar-citations"> <div class="circular medium citation"> <img src="https://upload.wikimedia.org/wikipedia/commons/a/a9/Google_Scholar_logo_2015.PNG" alt="Google Scholar logo"> </div> </a> </li> </ul> </div> <div class="row wrapper no-padding-top section-content"> <div class="small-10 small-centered columns"> <section class="content" id="article-new-home-crossref-citations"> <div class="large-12 columns"> <div class="panel margin-top-small citation-container"> <div class="row"> <div class="medium-12 columns"> <p>No CrossRef data available.</p> </div> </div> </div> </div> </section> <section style="display:none" class="content" id="article-new-home-scholar-citations"> <div class="large-12 columns"> <div class="panel margin-top-small citation-container"> <div class="row"> <div class="medium-12 columns"> <div class="print-only print-heading margin-bottom-large">Google 
Scholar Citations</div> <p>View all <a href="https://scholar.google.com/scholar?hl=en&lr=&cites=https://www.cambridge.org/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" target="_blank" rel="noopener">Google Scholar citations</a> for this article. </p> </div> </div> </div> </div> </section> </div> </div> </div> <a href="#" class="close-reveal-modal" aria-label="Close cited by"><span aria-hidden="true">×</span></a> </div> <script>
// Tab switcher for the "Cited by" modal (#article-new-home-productCitations):
// clicking a source tab (Crossref / Google Scholar) shows the matching
// citations panel and marks the clicked tab as active.
// Delegated to <body> so it keeps working if the modal is re-rendered.
$(document).ready(function () {
  $('body').on('click', '#article-new-home-productCitations .section-button', function (e) {
    // Tabs are href="#" anchors (Foundation modal convention); suppress the
    // default jump-to-top navigation.
    e.preventDefault();
    // data-id on the tab names the id of the <section> panel it controls.
    var id = $(this).attr('data-id');
    // Hide every panel and clear the previously active tab, then activate
    // the clicked tab and reveal its panel.
    $('#article-new-home-productCitations .section-content section').hide();
    $('#article-new-home-productCitations .section-container').removeClass('active');
    $(this).parent('.section-container').addClass('active');
    $('#' + id).show();
  });
});
</script> <div id='platform-footer'> <div class="__shared-elements-html ShEl"><div class="__shared-elements-head"> <link rel="stylesheet" href="/aca/shared-elements/_nuxt/entry.BhGMTrWu.css"> <link rel="prefetch" as="style" href="/aca/shared-elements/_nuxt/error-404.B06nACMW.css"> <link rel="prefetch" as="style" href="/aca/shared-elements/_nuxt/error-500.WGRfNq7F.css"> </div><div class="__shared-elements-body"><div id="__sharedElements-omp9t"><!--[--><!----><div class="apl"><footer class="apl-footer apl-theme--core" data-v-20efd383><section class="apl-footer__top" data-v-20efd383><div class="apl-container apl-footer__container-top" data-v-20efd383><!--[--><a href="/" class="apl-footer__logo-link" data-v-20efd383><img
src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAALYAAAAmCAMAAABeSJF1AAAB41BMVEUAAAD///////////////////////////////////////////////////8AAAD///////////////8nCAn///8rKyshBwgAAAAAAAA9Dg8QBwgAAAATBAUAAABLFhglJSULAgIAAAAoGRkAAAAAAAAAAAD////////wf4LqTVDv7+/nMzeQICLPz8+tJimgjwDf39+/rACQgAC7KSzZMDO/v7+vr6/KLDBXExSgLiLKLTCvngBgYGCISRGOdgN+aAOAcgCfn59vZACKVA2ceQeehAO9NCmTNhuEMxiZYw2vMSWSKh6jRRuDJxuVQRdsSAushwdcPQeWU1RQUFDZLzPqSTCfIyVlFhh9XAexkAWgoKCchod0GhyGPhWYVxFoLBF5Rg1qOA2reguLXwqMawd/cwCAgIDcSU1FRUAwMDDYSCqwPCKySB+CHB+lTxh1JBh9Xhd2LxSoZhG5eBCbbgpuWQPv1wDfyACQkJCPj4/xjI5wcHBvb2/RXiBpOR5zGRxzGRutVhi9ahaWTBVADg97UQq+oQRQRwD2s7S/vKCfnIDtZmmwp2BsVlevpECSLS9aLS7rVC0wLiCiOh+iOR+hOR+WTRWWTBSGPhRmIRR4OxLNrwRvWQT/5QDQugBvZQBfVgD+CXVrAAAAJ3RSTlMAIIC/QBDfn2CQ76AwcGCvUM9fv2+/319D79+AvxD336cgv7+QcC8qKGXpAAAJA0lEQVRYw9WY5RLbMAyAEyd2oM3Sdsw8dc2a0piZmZmZmZmZmR91MtXt6Lbbfmy6a+IosvyZZKVWh59LN+vflOvDUA4ME7J0ipBHLape1r8po9NRybStY54v3jptZrFaBSjD0HTU8C3TZ+5fPGda2rHVlrqdCPFdo7BNCYVaXz/aSqjSatH+sPgdQyM0RzyPdHKlsRHEnjdnQqV2fNGq1UvGFqEssOdPu1Ebd3zqqiVLxrZgUxI6nXJ+hjFPaVwgzXcMwDHUABDkLMvP8AIJWOhxVPUYy0c047XCAKmIA8A8z3NCp9DeoOvmMhCieZYbMwcFGGJv3vFiam3h5h3XRiXFETsBzleHLpoxZse4S3M+bRiVGOwsC13JlAc1XJkWUheAq/ULUF3LCy3NY8vqkXtgEGbF65w2pKrXdgaYqxuESJaI9BwB+OJFiNiz62Prp2YPnz29PqZYLZdgZGnowhmNqZVDY48snjyziW2HUGgWFR8DMCsGsYk2MOWM7IwPEPBHB0D1MVad1YaIrRnBVQ0S7dsRGh+x5SNir5zYqFduTVw8bfXBIsCIEsDQMdMbtfqtGYsnTD/UxGYtIxtIbD/PYZRQFkJIZdknX2NnkVNhK0j2LbZmlH6YMFEOc23YLmIPn3SzUq9MPjV51VjEHsmxh0+qVOrjJp+cfHm4xs4BFMx6kdixK1qQYjuGlSFN0IatFQY7/0NsH3WiwahlXxlsFUmSeXPe14ZNqp/ckCL2mSpiJ7NfTRy3csbkk/PTjma1Zq12sZmaUAngUBxu2UWPAngGW9yY3YJtAwQ/xKZiKjLCtRGDbROBvbwxfVsyZ9ummwuKpTJAFYaiqjYhmb5o07sxHNvspXYvgdhIGju2AoCCWont2HYQOkhtsAkw+l1svWUot7S/j00E9qhJaxalV45M3z9RYo+EoY1Ja5bMvPZy9ayJczU2fIudtwUGtdQQW7Zs2mYSQmPnHYfFhSa2becyIaHWz7CRmH0XO2QsBIldqY8dM7k+cyNGkpFrAXZXh46qT0LVjHRbZeWPse28jEmR8ukJKBdJC2oDm0VCCbCswiYE+Fj/BNv5MbaP7QYSe9TwJE3HJOnw4UWoApRwS6KqkTaSdCzfkmbi2iTjEEKCZmsRERAOxmgqwoDB1uFBLxKsFPwMm3GjWK9t6gmh7Wv7cdHI7VK1VK2WzraoB
oiqxosRlnNRHK0nRFJlcdi/wRZNRhqbinj/sy2JxcBEkkhMj8TWKcV4WAt8QWPIhgeHFx2+ePjiafXID/rOpmrQRu068qb1QaROFWYrPIOt+bTe5fHvZwGwIGfOnF7e1wFwPGdUv6UXVqxIVjxZJ7GFSmNTc5gonkJTL5+Fx1A11o5t4j7qlZZ8H9vMFJ/HH2FnrfGl8aVyaQ+US8tg6fC9V6+uaKzDx5HlZTByxHjENsPt6HLWs6juRKC8O8IjkXQ6fBlsng5JveosZNuxmxNA0FId7nn6A+wMjnYVylCCEfhbunzvtI0rlq/DYln+FLYiksGXEgx9hOkpVd1hRPKgXi1yXXAlS65VH4FkMscpyNmkAbBsM5XChgy2WaiFELEx5pUAxxxwkSQoe9cB4NBDFX8SWx/fSIi5JSPUjgFiwRExQKfZyIHQ43wBETEclWhm+Q4vxAHmo8iiHokadlawA1Eb6xFpiM6dTrTZIE8kfd+XSyobhCAy13wIjsAuA4Ly5fx0/JQpu5eVlaqqsA14jhCSo61fBSq7t7VCXKjUioIumkzfmFi/8JkQBMR3dQ1j1mcoyq6hQs4+vANw/+49rUHpaP2b0j2Zm0zYPmbm0f3pqOKIahlGlIeuHNU4PiGZe3Df3GSg9W9Kl1E7Noz7OGPbhMlT9xVl2Bs6+1i9Vpu1/UblxNh+1r8p/RbUFk6sLZy2YfS8BLHPjUTsOVM3VSort88aNrzR0/o3ZVD6ubGkNnvsnHp9bhEPd564Hpkwr3Ji+KzptWfD/v7/JPSveOl2YNqxqfWpE4/O37Kx+eW+5ujUqZOOLXy79XpvbUc8L8c/n4MsJnteVqQ0XsQvUkOohbecKBLi2WjqBb5XkCE4JhmibOMYgyj+4oDbBqoaFzfwIhKomkEB34giIUEkoiqJuTeSCWLb6t09rW+u1VZXalsaxfJpgPOloWMmThw3bnKtvnluLzNI8lTJBXgJ9LcKVwARGtFyXpyHNmpd+cYOHf3N3EnZcgfcF0WWSDjN6mwnwKoO2trgYR2GGl7Ea4xGjFqUV8fnCEetZ5pemVB5XfkwJimWRwD/ch+Vzq9sOZUuXjSsq5kWie1ySKK4CVeAvDr8WtDY1FZvYtGJEPVaQ7UvynkA4R1qmT8WchBJ1gxkm9hYCvLSR4gVKLVwuJOJtQVp7cSMYfrLfUFlzazKhO2Vy12s72PbBPLUYKM4QLElW2O7+o3DRAJA2v8OMkkqA5egvcF2oSBZY6AaWxy/qi5jynzwqllL9k0YN8Zgb5p0KVkz/+DoPv1/hI1wXht2hDcayKSpk5/xxZvALoQ5CccK38VGFeM6g51BLBti2w1lbg2O30kuNGCuTEw8Cd5zfbKg9ubQOI59rozYSWPj1kmNZHQH64fYOLxBKzYNQ8t3Jbbv5iS2E0OhmV2Q72HLVWSwh0TE5qz5GE0VtuvGOiUSOT0DprhXNealk8alxZKMJMmscavHjuqO1D/GpgyIweaT6GZUqsoblm9oqPNbmwH9Glubt4223ocstHXRUhZ+GCqzQEXBLsOSmbOHY9zGZYLYC+c11vfqb7Vj51uwJUhssPGeL7RwUAkZSRsRcdzfwsa5amJzcLlA0C8PWJ6u0aHLsOFJccoZgD1l/J9kfY9vzhlHdD+T5e75RQY3g+KA3cJBJDZqI9lNn1m/gy33oMa2IyumaOtYNh9osVEMuPpy37W+hwl8Lek26+SLwB8xltOfk9kY8q7+x0El4AzPE8dyM4DubQYe9Z1OneKstJUrPcRuaXMPdUJyTCbxWawZWZTXxCLg0RS6VhT7xOF7yvMzHKIFvO9aQOydBrpdqJul7T1pe/uzUxtz6z8WlYxTdW8H59D/n3T4P6C/APfqZ5MZcurkAAAAAElFTkSuQmCC" alt="Cambridge University Press" class="apl-footer__logo-image" data-v-20efd383></a><div 
class="apl-footer__nav-and-location" data-v-20efd383><div class="apl-footer__nav" data-v-20efd383><nav class="apl-link-block__nav" data-v-7c018d72 data-v-20efd383><h2 class="apl-link-block__heading" data-v-7c018d72>Our Site</h2><ul class="apl-link-block__list-platforms" data-v-7c018d72><!--[--><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="/core/accessibility" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Cambridge Core accessibility page" data-v-7c018d72><!----><!--[--><span>Accessibility</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="/core/help/FAQs" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Cambridge Core contact & help page" data-v-7c018d72><!----><!--[--><span>Contact & Help</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="/core/legal-notices/terms" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Cambridge Core legal notices page" data-v-7c018d72><!----><!--[--><span>Legal Notices</span><!--]--><!----><!----></a></li><!--]--></ul></nav><nav class="apl-link-block__nav" data-v-7c018d72 data-v-20efd383><h2 class="apl-link-block__heading" data-v-7c018d72>Our Platforms</h2><ul class="apl-link-block__list-platforms" data-v-7c018d72><!--[--><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/core/" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Cambridge Core. Opens in a new window." 
data-v-7c018d72><!----><!--[--><span>Cambridge Core</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/engage/coe/public-dashboard" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Cambridge Open Engage. Opens in a new window." data-v-7c018d72><!----><!--[--><span>Cambridge Open Engage</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/highereducation/" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Cambridge Higher Education. Opens in a new window." data-v-7c018d72><!----><!--[--><span>Cambridge Higher Education</span><!--]--><!----><!----></a></li><!--]--></ul></nav><nav class="apl-link-block__nav" data-v-7c018d72 data-v-20efd383><h2 class="apl-link-block__heading" data-v-7c018d72>Our Products</h2><ul class="apl-link-block__list-platforms" data-v-7c018d72><!--[--><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/core/publications/journals" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Journals. Opens in a new window." data-v-7c018d72><!----><!--[--><span>Journals</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/core/search?aggs%5BproductTypes%5D%5Bfilters%5D=BOOK" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Books. Opens in a new window." 
data-v-7c018d72><!----><!--[--><span>Books</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/core/publications/elements" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Elements. Opens in a new window." data-v-7c018d72><!----><!--[--><span>Elements</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/highereducation/search?aggs=%24productTypes%24BOOK%3Atrue%3B%3B" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Textbooks. Opens in a new window." data-v-7c018d72><!----><!--[--><span>Textbooks</span><!--]--><!----><!----></a></li><li class="apl-link-block__list-item" data-v-7c018d72><a tabindex="0" href="https://www.cambridge.org/highereducation/search?aggs=%24productTypes%24COURSEWARE%3Atrue%3B%3B" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-link-block__btn-link" aria-label="Courseware. Opens in a new window." data-v-7c018d72><!----><!--[--><span>Courseware</span><!--]--><!----><!----></a></li><!--]--></ul></nav></div><div class="apl-footer__socials-and-location" data-v-20efd383><div class="apl-footer__socials" data-v-20efd383><h2 class="apl-footer__heading" data-v-20efd383>Join us online</h2><ul class="apl-footer__list-socials" data-v-20efd383><!--[--><li class="apl-footer__list-item" data-v-20efd383><a tabindex="0" href="https://www.youtube.com/playlist?list=PLTK8KRW19hUVucVRHbIx73oLKUro8HXt0" target="_blank" class="apl-button apl-button--primary apl-button--sm apl-button--text apl-button--link apl-footer__list-item-link" aria-label="Visit Cambridge University Press Youtube account. Opens in a new tab." 
data-v-20efd383><!----><!--[--><div class="apl-icon apl-icon--youtube" style="color:;" data-v-27c0a44c data-v-20efd383><!----></div><!--]--><!----><!----></a></li><li class="apl-footer__list-item" data-v-20efd383><a tabindex="0" href="https://twitter.com/CambridgeCore" target="_blank" class="apl-button apl-button--primary apl-button--sm apl-button--text apl-button--link apl-footer__list-item-link" aria-label="Visit Cambridge University Press X account. Opens in a new tab." data-v-20efd383><!----><!--[--><div class="apl-icon apl-icon--x" style="color:;" data-v-27c0a44c data-v-20efd383><!----></div><!--]--><!----><!----></a></li><li class="apl-footer__list-item" data-v-20efd383><a tabindex="0" href="https://www.facebook.com/CambridgeCore" target="_blank" class="apl-button apl-button--primary apl-button--sm apl-button--text apl-button--link apl-footer__list-item-link" aria-label="Visit Cambridge University Press Facebook account. Opens in a new tab." data-v-20efd383><!----><!--[--><div class="apl-icon apl-icon--facebook" style="color:;" data-v-27c0a44c data-v-20efd383><!----></div><!--]--><!----><!----></a></li><li class="apl-footer__list-item" data-v-20efd383><a tabindex="0" href="https://www.instagram.com/cambridgeuniversitypress/" target="_blank" class="apl-button apl-button--primary apl-button--sm apl-button--text apl-button--link apl-footer__list-item-link" aria-label="Visit Cambridge University Press Instagram account. Opens in a new tab." data-v-20efd383><!----><!--[--><div class="apl-icon apl-icon--instagram" style="color:;" data-v-27c0a44c data-v-20efd383><!----></div><!--]--><!----><!----></a></li><li class="apl-footer__list-item" data-v-20efd383><a tabindex="0" href="https://www.linkedin.com/showcase/11096649" target="_blank" class="apl-button apl-button--primary apl-button--sm apl-button--text apl-button--link apl-footer__list-item-link" aria-label="Visit Cambridge University Press Linkedin account. Opens in a new tab." 
data-v-20efd383><!----><!--[--><div class="apl-icon apl-icon--linkedin" style="color:;" data-v-27c0a44c data-v-20efd383><!----></div><!--]--><!----><!----></a></li><!--]--></ul></div><div class="apl-footer__location apl-footer__location--desktop" data-v-20efd383><div class="apl-dropdown" data-v-6114426c data-v-20efd383><div class="apl-location" data-v-ae305bb6 data-v-6114426c><div class="apl-location__main" data-v-ae305bb6><div class="apl-location__location-picker" data-v-ae305bb6><label class="apl-location__label" for="footer-location-picker" aria-label="Select your location" data-v-ae305bb6>Location</label><div class="apl-location__input-wrapper" data-v-ae305bb6><input id="footer-location-picker" type="text" role="combobox" aria-expanded="false" placeholder="Choose your location" class="apl-location__input" value="GBR" data-v-ae305bb6><div class="apl-icon apl-icon--globe-web apl-location__globe" style="color:;" aria-hidden="true" data-v-27c0a44c data-v-ae305bb6><!----></div><!----><button tabindex="0" type="button" class="apl-button apl-button--primary apl-button--sm apl-button--icon-only apl-location__chevron-btn" aria-label="open location list" data-v-ae305bb6><!----><!--[--><!----><!--]--><div class="apl-icon apl-icon--chevron-down apl-button__icon--center" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div><!----></button><!----><div class="apl-location__error" data-v-ae305bb6><div class="apl-icon apl-icon--error apl-location__error-icon" style="color:;" data-v-27c0a44c data-v-ae305bb6><!----></div><p class="apl-location__error-message" data-v-ae305bb6> Please choose a valid location. 
</p></div></div></div><button tabindex="0" type="submit" class="apl-button apl-button--secondary apl-button--sm apl-location__button" aria-label="Submit to update location preference" data-v-ae305bb6><!----><!--[--><span>Update</span><!--]--><!----><!----></button></div></div></div><!----></div></div></div><!--]--></div></section><section class="apl-footer__bottom" data-v-20efd383><div class="apl-container apl-footer__container-bottom" data-v-20efd383><!--[--><button tabindex="0" type="button" class="apl-button apl-button--primary apl-button--lg apl-footer__btn-collapse" aria-label="Click to open legal information links list" aria-expanded="false" aria-controls="apl-footer__legal-links" data-v-20efd383><!----><!--[--><span>Legal Information</span><!--]--><!----><div class="apl-icon apl-icon--chevron-down apl-button__icon--right" style="color:;" tabindex="-1" data-v-27c0a44c><!----></div></button><ul class="apl-footer__list-legal" id="apl-footer__legal-links" data-v-20efd383><!--[--><li class="apl-footer__list-item apl-footer__list-item-legal" data-v-20efd383><a tabindex="0" href="https://www.cambridge.org/about-us/rights-permissions/" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-footer__btn-link apl-footer__btn-link-legal" aria-label="Rights and Permissions. Opens in a new window." data-v-20efd383><!----><!--[--><span>Rights & Permissions</span><!--]--><!----><!----></a></li><li class="apl-footer__list-item apl-footer__list-item-legal" data-v-20efd383><a tabindex="0" href="https://www.cambridge.org/about-us/legal-notices/copyright/" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-footer__btn-link apl-footer__btn-link-legal" aria-label="Copyright. Opens in a new window." 
data-v-20efd383><!----><!--[--><span>Copyright</span><!--]--><!----><!----></a></li><li class="apl-footer__list-item apl-footer__list-item-legal" data-v-20efd383><a tabindex="0" href="https://www.cambridge.org/about-us/legal-notices/privacy-policy/" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-footer__btn-link apl-footer__btn-link-legal" aria-label="Privacy Notice. Opens in a new window." data-v-20efd383><!----><!--[--><span>Privacy Notice</span><!--]--><!----><!----></a></li><li class="apl-footer__list-item apl-footer__list-item-legal" data-v-20efd383><a tabindex="0" href="https://www.cambridge.org/about-us/legal-notices/terms-use" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-footer__btn-link apl-footer__btn-link-legal" aria-label="Terms of Use. Opens in a new window." data-v-20efd383><!----><!--[--><span>Terms of Use</span><!--]--><!----><!----></a></li><li class="apl-footer__list-item apl-footer__list-item-legal" data-v-20efd383><a tabindex="0" href="https://www.cambridge.org/about-us/legal-notices/cookies-policy/" target="_blank" class="apl-button apl-button--primary apl-button--lg apl-button--text apl-button--link apl-footer__btn-link apl-footer__btn-link-legal" aria-label="Cookies Policy. Opens in a new window." 
data-v-20efd383><!----><!--[--><span>Cookies Policy</span><!--]--><!----><!----></a></li><!--]--></ul><div class="apl-footer__copyright" data-v-20efd383><div class="apl-icon apl-icon--copyright" style="color:;" data-v-27c0a44c data-v-20efd383><!----></div><span data-v-20efd383> Cambridge University Press 2024</span></div><!--]--></div></section></footer></div><!----><!----><!--]--></div> </div></div><script>window.__PLATFORM_FOOTER_DATA__ = {"data":{"entry":{"slug":"global-config","platformLinks":[{"name":"navigationBarCategory","value":{"title":"Browse","navigationBarArea":[{"name":"navigationBarArea","value":{"title":"Subjects","hubPage":{"name":"url","value":{"title":"Subjects","url":"/core/browse-subjects","ariaLabel":"Subjects"}},"column1Heading":" Subjects (A-D)","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"Anthropology","url":"/core/browse-subjects/anthropology","ariaLabel":"Anthropology"}},{"name":"url","value":{"title":"Archaeology","url":"/core/browse-subjects/archaeology","ariaLabel":"Archaeology"}},{"name":"url","value":{"title":"Area Studies","url":"/core/browse-subjects/area-studies","ariaLabel":"Area Studies"}},{"name":"url","value":{"title":"Art","url":"/core/browse-subjects/art","ariaLabel":"Art"}},{"name":"url","value":{"title":"Chemistry","url":"/core/browse-subjects/chemistry","ariaLabel":"Chemistry"}},{"name":"url","value":{"title":"Classical Studies","url":"/core/browse-subjects/classical-studies","ariaLabel":"Classical Studies"}},{"name":"url","value":{"title":"Computer Science","url":"/core/browse-subjects/computer-science","ariaLabel":"Computer Science"}},{"name":"url","value":{"title":"Drama, Theatre, Performance Studies","url":"/core/browse-subjects/drama-and-theatre","ariaLabel":"Drama, Theatre, Performance Studies"}}],"column2Heading":" Subjects (E-K)","column2StaticPagesOrUrls":[{"name":"url","value":{"title":"Earth and Environmental 
Science","url":"/core/browse-subjects/earth-and-environmental-sciences","ariaLabel":"Earth and Environmental Science"}},{"name":"url","value":{"title":"Economics","url":"/core/browse-subjects/economics","ariaLabel":"Economics"}},{"name":"url","value":{"title":"Education","url":"/core/browse-subjects/education","ariaLabel":"Education"}},{"name":"url","value":{"title":"Engineering","url":"/core/browse-subjects/engineering","ariaLabel":"Engineering"}},{"name":"url","value":{"title":"English Language Teaching – Resources for Teachers","url":"/core/browse-subjects/english-language-teaching-resources-for-teachers","ariaLabel":"English Language Teaching – Resources for Teachers"}},{"name":"url","value":{"title":"Film, Media, Mass Communication","url":"/core/browse-subjects/film-media-mass-ommunication","ariaLabel":"Film, Media, Mass Communication"}},{"name":"url","value":{"title":"General Science","url":"/core/browse-subjects/general-science","ariaLabel":"General Science"}},{"name":"url","value":{"title":"Geography","url":"/core/browse-subjects/geography","ariaLabel":"Geography"}},{"name":"url","value":{"title":"History","url":"/core/browse-subjects/history","ariaLabel":"History"}}],"column3Heading":" Subjects (L-O)","column3StaticPagesOrUrls":[{"name":"url","value":{"title":"Language and Linguistics","url":"/core/browse-subjects/language-and-linguistics","ariaLabel":"Language and Linguistics"}},{"name":"url","value":{"title":"Law","url":"/core/browse-subjects/law","ariaLabel":"Law"}},{"name":"url","value":{"title":"Life Sciences","url":"/core/browse-subjects/life-sciences","ariaLabel":"Life Sciences"}},{"name":"url","value":{"title":"Literature","url":"/core/browse-subjects/literature","ariaLabel":"Literature"}},{"name":"url","value":{"title":"Management","url":"/core/browse-subjects/management","ariaLabel":"Management"}},{"name":"url","value":{"title":"Materials Science","url":"/core/browse-subjects/materials-science","ariaLabel":"Materials 
Science"}},{"name":"url","value":{"title":"Mathematics","url":"/core/browse-subjects/mathematics","ariaLabel":"Mathematics"}},{"name":"url","value":{"title":"Medicine","url":"/core/browse-subjects/medicine","ariaLabel":"Medicine"}},{"name":"url","value":{"title":"Music","url":"/core/browse-subjects/music","ariaLabel":"Music"}},{"name":"url","value":{"title":"Nutrition","url":"/core/browse-subjects/nutrition","ariaLabel":"Nutrition"}}],"column4Heading":" Subjects (P-Z)","column4StaticPagesOrUrls":[{"name":"url","value":{"title":"Philosophy","url":"/core/browse-subjects/philosophy","ariaLabel":"Philosophy"}},{"name":"url","value":{"title":"Physics and Astronomy","url":"/core/browse-subjects/physics","ariaLabel":"Physics and Astronomy"}},{"name":"url","value":{"title":"Politics and International Relations","url":"/core/browse-subjects/politics-and-international-relations","ariaLabel":"Politics and International Relations"}},{"name":"url","value":{"title":"Psychiatry","url":"/core/browse-subjects/psychiatry","ariaLabel":"Psychiatry"}},{"name":"url","value":{"title":"Psychology","url":"/core/browse-subjects/psychology","ariaLabel":"Psychology"}},{"name":"url","value":{"title":"Religion","url":"/core/browse-subjects/religion","ariaLabel":"Religion"}},{"name":"url","value":{"title":"Social Science Research Methods","url":"/core/browse-subjects/social-science-research-methods","ariaLabel":"Social Science Research Methods"}},{"name":"url","value":{"title":"Sociology","url":"/core/browse-subjects/sociology","ariaLabel":"Sociology"}},{"name":"url","value":{"title":"Statistics and Probability","url":"/core/browse-subjects/statistics-and-probability","ariaLabel":"Statistics and Probability"}}],"slug":"subjects"}},{"name":"navigationBarArea","value":{"title":"Open access","hubPage":{"name":"url","value":{"title":"Open access","url":"/core/publications/open-access","ariaLabel":"Open access"}},"column1Heading":"All open access 
publishing","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"Open access","url":"/core/publications/open-access","ariaLabel":"Open access"}},{"name":"url","value":{"title":"Open access journals","url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc","ariaLabel":"Open access journals"}},{"name":"url","value":{"title":"Research open journals","url":"/core/publications/open-access/research-open?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc","ariaLabel":"Research open journals"}},{"name":"url","value":{"title":"Journals containing open access","url":"/core/publications/open-access/hybrid-open-access-journals?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc","ariaLabel":"Journals containing open access"}},{"name":"url","value":{"title":"Open access articles","url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL_ARTICLE","ariaLabel":"Open access articles"}},{"name":"url","value":{"title":"Open access books","url":"/core/publications/open-access/listing?aggs[productTypes][filters]=BOOK&sort=canonical.date:desc","ariaLabel":"Open access books"}},{"name":"url","value":{"title":"Open access Elements","url":"/core/publications/elements/published-elements?aggs%5BopenAccess%5D%5Bfilters%5D=7275BA1E84CA769210167A6A66523B47&aggs%5BproductTypes%5D%5Bfilters%5D=ELEMENT&searchWithinIds=ECFD8F5C64F47F3F5A3D395C15B7C493","ariaLabel":"Open access Elements"}}],"slug":"open-access"}},{"name":"navigationBarArea","value":{"title":"Journals","hubPage":{"name":"url","value":{"title":"Journals","url":"/core/publications/journals","ariaLabel":"Journals"}},"column1Heading":"Explore","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"All journal subjects","url":"/core/publications/journals","ariaLabel":"All journal subjects"}},{"name":"url","value":{"title":"Search journals","url":"/core/publications/journals","ariaLabel":"Search 
journals"}}],"column2Heading":"Open access","column2StaticPagesOrUrls":[{"name":"url","value":{"title":"Open access journals","url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc","ariaLabel":"Open access journals"}},{"name":"url","value":{"title":"Research open journals","url":"/core/publications/open-access/research-open?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc","ariaLabel":"Research open journals"}},{"name":"url","value":{"title":"Journals containing open access","url":"/core/publications/open-access/hybrid-open-access-journals?aggs[productTypes][filters]=JOURNAL&statuses=PUBLISHED&sort=titleSort:asc","ariaLabel":"Journals containing open access"}},{"name":"url","value":{"title":"Open access articles","url":"/core/publications/open-access/listing?aggs[productTypes][filters]=JOURNAL_ARTICLE","ariaLabel":"Open access articles"}}],"column3Heading":"Collections","column3StaticPagesOrUrls":[{"name":"url","value":{"title":"Cambridge Forum","url":"/core/publications/collections/cambridge-forum","ariaLabel":"Cambridge Forum"}},{"name":"url","value":{"title":"Cambridge Law Reports Collection","url":"/core/publications/collections/cambridge-law-reports-collection","ariaLabel":"Cambridge Law Reports Collection"}},{"name":"url","value":{"title":"Cambridge Prisms","url":"/core/publications/collections/cambridge-prisms","ariaLabel":"Cambridge Prisms"}},{"name":"url","value":{"title":"Research Directions","url":"/core/publications/collections/research-directions","ariaLabel":"Research Directions"}}],"slug":"journals"}},{"name":"navigationBarArea","value":{"title":"Books","hubPage":{"name":"url","value":{"title":"Books","url":"/core/publications/books","ariaLabel":"Books"}},"column1Heading":"Explore","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"Books","url":"/core/publications/books","ariaLabel":"Books"}},{"name":"url","value":{"title":"Open access 
books","url":"/core/publications/open-access/listing?aggs[productTypes][filters]=BOOK&sort=canonical.date:desc","ariaLabel":"Open access books"}},{"name":"url","value":{"title":"New books","url":"/core/publications/books/listing?aggs[productDate][filters]=Last+3+months&aggs[productTypes][filters]=BOOK&sort=canonical.date:desc","ariaLabel":"New books"}},{"name":"url","value":{"title":"Flip it Open","url":"/core/publications/collections/flip-it-open","ariaLabel":"Flip it Open"}}],"column2Heading":"Collections","column2StaticPagesOrUrls":[{"name":"url","value":{"title":"Cambridge Companions","url":"/core/publications/collections/cambridge-companions","ariaLabel":"Cambridge Companions"}},{"name":"url","value":{"title":"Cambridge Editions","url":"/core/publications/collections/cambridge-editions","ariaLabel":"Cambridge Editions"}},{"name":"url","value":{"title":"Cambridge Histories","url":"/core/publications/collections/cambridge-histories","ariaLabel":"Cambridge Histories"}},{"name":"url","value":{"title":"Cambridge Library Collection","url":"/core/publications/collections/cambridge-library-collection","ariaLabel":"Cambridge Library Collection"}},{"name":"url","value":{"title":"Cambridge Shakespeare","url":"/core/publications/collections/cambridge-shakespeare","ariaLabel":"Cambridge Shakespeare"}},{"name":"url","value":{"title":"Cambridge Handbooks","url":"/core/publications/collections/cambridgehandbooks","ariaLabel":"Cambridge Handbooks"}}],"column3Heading":" Collections (cont.)","column3StaticPagesOrUrls":[{"name":"url","value":{"title":"Dispute Settlement Reports Online","url":"/core/publications/collections/dispute-settlement-reports-online","ariaLabel":"Dispute Settlement Reports Online"}},{"name":"url","value":{"title":"Flip it Open","url":"/core/publications/collections/flip-it-open","ariaLabel":"Flip it Open"}},{"name":"url","value":{"title":"Hemingway Letters","url":"/core/publications/collections/hemingway-letters","ariaLabel":"Hemingway 
Letters"}},{"name":"url","value":{"title":"Shakespeare Survey","url":"/core/publications/collections/shakespeare-survey","ariaLabel":"Shakespeare Survey"}},{"name":"url","value":{"title":"Stahl Online","url":"/core/publications/collections/stahl-online","ariaLabel":"Stahl Online"}},{"name":"url","value":{"title":"The Correspondence of Isaac Newton","url":"/core/publications/collections/the-correspondence-of-isaac-newton","ariaLabel":"The Correspondence of Isaac Newton"}}],"slug":"books"}},{"name":"navigationBarArea","value":{"title":"Elements","hubPage":{"name":"url","value":{"title":"Elements","url":"/core/publications/elements","ariaLabel":"Elements"}},"column1Heading":"Explore","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"About Elements","url":"/core/publications/elements","ariaLabel":"About Elements"}},{"name":"url","value":{"title":"Elements series","url":"/core/publications/elements/cambridge-elements-series","ariaLabel":"Elements series"}},{"name":"url","value":{"title":"Open access Elements","url":"/core/publications/elements/published-elements?aggs%5BopenAccess%5D%5Bfilters%5D=7275BA1E84CA769210167A6A66523B47&aggs%5BproductTypes%5D%5Bfilters%5D=ELEMENT&searchWithinIds=ECFD8F5C64F47F3F5A3D395C15B7C493","ariaLabel":"Open access Elements"}},{"name":"url","value":{"title":"New Elements","url":"/core/publications/elements/published-elements?aggs%5BproductTypes%5D%5Bfilters%5D=ELEMENT&aggs%5BproductDate%5D%5Bfilters%5D=Last%203%20months&searchWithinIds=ECFD8F5C64F47F3F5A3D395C15B7C493","ariaLabel":"New Elements"}}],"column2Heading":"Subjects (A-E)","column2StaticPagesOrUrls":[{"name":"url","value":{"title":"Anthropology","url":"/core/elements/subject/Anthropology/2E44A5AF2838E017617A26DD79FAEAEE","ariaLabel":"Anthropology"}},{"name":"url","value":{"title":"Archaeology","url":"/core/elements/subject/Archaeology/63A50B5368A9F97F8AA2D6AB965B5F4C","ariaLabel":"Archaeology"}},{"name":"url","value":{"title":"Classical 
Studies","url":"/core/elements/subject/Classical%20Studies/DDC63B7F5792FE2A95D1FB15F76E3F42","ariaLabel":"Classical Studies"}},{"name":"url","value":{"title":"Computer Science","url":"/core/elements/subject/Computer%20Science/A57E10708F64FB69CE78C81A5C2A6555","ariaLabel":"Computer Science"}},{"name":"url","value":{"title":"Drama, Theatre, Performance Studies","url":"/core/elements/subject/Drama,%20Theatre,%20Performance%20Studies/2825E4E39F2D641B36543EE80FB1DEA3","ariaLabel":"Drama, Theatre, Performance Studies"}},{"name":"url","value":{"title":"Earth and Environmental Sciences","url":"/core/elements/subject/Earth%20and%20Environmental%20Sciences/F470FBF5683D93478C7CAE5A30EF9AE8","ariaLabel":"Earth and Environmental Sciences"}},{"name":"url","value":{"title":"Economics","url":"/core/elements/subject/Economics/FA44491F1F55F917C43E9832715B9DE7","ariaLabel":"Economics"}},{"name":"url","value":{"title":"Education","url":"/core/elements/subject/Education/550D00F8DF590F2598CF7CC0038E24D1","ariaLabel":"Education"}},{"name":"url","value":{"title":"Engineering","url":"/core/elements/subject/Engineering/CCC62FE56DCC1D050CA1340C1CCF46F5","ariaLabel":"Engineering"}}],"column3Heading":" Subjects (F-O)","column3StaticPagesOrUrls":[{"name":"url","value":{"title":"Film, Media, Mass Communication","url":"/core/elements/subject/Film,%20Media,%20Mass%20Communication/4B91F10E834814A90CE718E7831E492F","ariaLabel":"Film, Media, Mass Communication"}},{"name":"url","value":{"title":"History","url":"/core/elements/subject/History/66BE42A30172E280FDE64F8EE2F485B0","ariaLabel":"History"}},{"name":"url","value":{"title":"Language and Linguistics","url":"/core/elements/subject/Language%20and%20Linguistics/140D314098408C26BDF3009F7FF858E9","ariaLabel":"Language and Linguistics"}},{"name":"url","value":{"title":"Law","url":"/core/elements/subject/Law/7C9FB6788DD8D7E6696263BC774F4D5B","ariaLabel":"Law"}},{"name":"url","value":{"title":"Life 
Sciences","url":"/core/elements/subject/Life%20Sciences/E044EF2F61B601378786E9EDA901B2D5","ariaLabel":"Life Sciences"}},{"name":"url","value":{"title":"Literature","url":"/core/elements/subject/Literature/F2434ADC122145767C6C3B988A8E9BD5","ariaLabel":"Literature"}},{"name":"url","value":{"title":"Management","url":"/core/elements/subject/Management/0EDCC0540639B06A5669BDEEF50C4CBE","ariaLabel":"Management"}},{"name":"url","value":{"title":"Mathematics","url":"/core/elements/subject/Mathematics/FA1467C44B5BD46BB8AA6E58C2252153","ariaLabel":"Mathematics"}},{"name":"url","value":{"title":"Medicine","url":"/core/elements/subject/Medicine/66FF02B2A4F83D9A645001545197F287","ariaLabel":"Medicine"}},{"name":"url","value":{"title":"Music","url":"/core/elements/subject/Music/A370B5604591CB3C7F9AFD892DDF7BD1","ariaLabel":"Music"}}],"column4Heading":" Subjects (P-Z)","column4StaticPagesOrUrls":[{"name":"url","value":{"title":"Philosophy","url":"/core/elements/subject/Philosophy/2D1AC3C0E174F1F1A93F8C7DE19E0FAB","ariaLabel":"Philosophy"}},{"name":"url","value":{"title":"Physics and Astronomy","url":"/core/elements/subject/Physics%20and%20Astronomy/DBFB610E9FC5E012C011430C0573CC06","ariaLabel":"Physics and Astronomy"}},{"name":"url","value":{"title":"Politics and International Relations","url":"/core/elements/subject/Politics%20and%20International%20Relations/3BF83347E5E456DAC34F3FABFC8BBF4E","ariaLabel":"Politics and International Relations"}},{"name":"url","value":{"title":"Psychology","url":"/core/elements/subject/Psychology/21B42A72BA3E4CB0E3315E5B1B71B07F","ariaLabel":"Psychology"}},{"name":"url","value":{"title":"Religion","url":"/core/elements/subject/Religion/53E51D24FB488962B9364A2C4B45D1C3","ariaLabel":"Religion"}},{"name":"url","value":{"title":"Sociology","url":"/core/elements/subject/Sociology/0E2CD53A93003DF17E52D753F6E90683","ariaLabel":"Sociology"}},{"name":"url","value":{"title":"Statistics and 
Probability","url":"/core/elements/subject/Statistics%20and%20Probability/3150B8B0D1B0B4E8DC17EC9EDFD9CA26","ariaLabel":"Statistics and Probability"}}],"slug":"elements"}},{"name":"navigationBarArea","value":{"title":"Textbooks","hubPage":{"name":"url","value":{"title":"Textbooks","url":"/core/publications/textbooks","ariaLabel":"Textbooks"}},"column1Heading":"Explore","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"Cambridge Higher Education","url":"/highereducation/","ariaLabel":"Cambridge Higher Education"}},{"name":"url","value":{"title":"Title list","url":"/highereducation/services/librarians/title-list","ariaLabel":"Title list"}},{"name":"url","value":{"title":"New titles","url":"/highereducation/search?sortBy=publication_date&aggs=%24productDate%24Last%25206%2520months%3Atrue%26Last%252012%2520months%3Atrue%26Last%25203%2520years%3Atrue%26Over%25203%2520years%3Atrue%3B%3B&event=SE-AU_PREF","ariaLabel":"New titles"}}],"slug":"textbooks"}},{"name":"navigationBarArea","value":{"title":"Collections","hubPage":{"name":"url","value":{"title":"Collections","url":"/core/publications/collections","ariaLabel":"Collections"}},"column1Heading":"Book collections","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"Cambridge Companions","url":"/core/publications/collections/cambridge-companions","ariaLabel":"Cambridge Companions"}},{"name":"url","value":{"title":"Cambridge Editions","url":"/core/publications/collections/cambridge-editions","ariaLabel":"Cambridge Editions"}},{"name":"url","value":{"title":"Cambridge Histories","url":"/core/publications/collections/cambridge-histories","ariaLabel":"Cambridge Histories"}},{"name":"url","value":{"title":"Cambridge Library Collection","url":"/core/publications/collections/cambridge-library-collection","ariaLabel":"Cambridge Library Collection"}},{"name":"url","value":{"title":"Cambridge Shakespeare","url":"/core/publications/collections/cambridge-shakespeare","ariaLabel":"Cambridge 
Shakespeare"}},{"name":"url","value":{"title":"Cambridge Handbooks","url":"/core/publications/collections/cambridgehandbooks","ariaLabel":"Cambridge Handbooks"}}],"column2Heading":" Book collections (cont.)","column2StaticPagesOrUrls":[{"name":"url","value":{"title":"Dispute Settlement Reports Online","url":"/core/publications/collections/dispute-settlement-reports-online","ariaLabel":"Dispute Settlement Reports Online"}},{"name":"url","value":{"title":"Flip it Open","url":"/core/publications/collections/flip-it-open","ariaLabel":"Flip it Open"}},{"name":"url","value":{"title":"Hemingway Letters","url":"/core/publications/collections/hemingway-letters","ariaLabel":"Hemingway Letters"}},{"name":"url","value":{"title":"Shakespeare Survey","url":"/core/publications/collections/shakespeare-survey","ariaLabel":"Shakespeare Survey"}},{"name":"url","value":{"title":"Stahl Online","url":"/core/publications/collections/stahl-online","ariaLabel":"Stahl Online"}},{"name":"url","value":{"title":"The Correspondence of Isaac Newton","url":"/core/publications/collections/the-correspondence-of-isaac-newton","ariaLabel":"The Correspondence of Isaac Newton"}}],"column3Heading":"Journal collections","column3StaticPagesOrUrls":[{"name":"url","value":{"title":"Cambridge Forum","url":"/core/publications/collections/cambridge-forum","ariaLabel":"Cambridge Forum"}},{"name":"url","value":{"title":"Cambridge Law Reports Collection","url":"/core/publications/collections/cambridge-law-reports-collection","ariaLabel":"Cambridge Law Reports Collection"}},{"name":"url","value":{"title":"Cambridge Prisms","url":"/core/publications/collections/cambridge-prisms","ariaLabel":"Cambridge Prisms"}},{"name":"url","value":{"title":"Research Directions","url":"/core/publications/collections/research-directions","ariaLabel":"Research Directions"}}],"column4Heading":"Series","column4StaticPagesOrUrls":[{"name":"url","value":{"title":"All series","url":"/core/publications/collections/series","ariaLabel":"All 
series"}}],"slug":"collections"}},{"name":"navigationBarArea","value":{"title":"Partners","hubPage":{"name":"url","value":{"title":"Publishing partners","url":"/core/publications/publishing-partners","ariaLabel":"Publishing partners"}},"column1Heading":"Partners","column1StaticPagesOrUrls":[{"name":"url","value":{"title":"Agenda Publishing","url":"/core/publications/publishing-partners/agenda-publishing","ariaLabel":"Agenda Publishing"}},{"name":"url","value":{"title":"Amsterdam University Press","url":"/core/publications/publishing-partners/amsterdam-university-press","ariaLabel":"Amsterdam University Press"}},{"name":"url","value":{"title":"Anthem Press","url":"/core/publications/publishing-partners/anthem-press","ariaLabel":"Anthem Press"}},{"name":"url","value":{"title":"Boydell & Brewer","url":"/core/publications/publishing-partners/boydell-brewer","ariaLabel":"Boydell & Brewer"}},{"name":"url","value":{"title":"Bristol University Press","url":"/core/publications/publishing-partners/bristol-university-press","ariaLabel":"Bristol University Press"}},{"name":"url","value":{"title":"Edinburgh University Press","url":"/core/publications/publishing-partners/edinburgh-university-press","ariaLabel":"Edinburgh University Press"}},{"name":"url","value":{"title":"Emirates Center for Strategic Studies and Research","url":"/core/publications/publishing-partners/emirates-center","ariaLabel":"Emirates Center for Strategic Studies and Research"}},{"name":"url","value":{"title":"Facet Publishing","url":"/core/publications/publishing-partners/facet-publishing","ariaLabel":"Facet Publishing"}}],"column2Heading":" Partners (cont.)","column2StaticPagesOrUrls":[{"name":"url","value":{"title":"Foundation Books","url":"/core/publications/publishing-partners/foundation-books","ariaLabel":"Foundation Books"}},{"name":"url","value":{"title":"Intersentia","url":"/core/publications/publishing-partners/intersentia","ariaLabel":"Intersentia"}},{"name":"url","value":{"title":"ISEAS-Yusof 
Ishak Institute","url":"/core/publications/publishing-partners/iseas","ariaLabel":"ISEAS-Yusof Ishak Institute"}},{"name":"url","value":{"title":"Jagiellonian University Press","url":"/core/publications/publishing-partners/jagiellonian-university-press","ariaLabel":"Jagiellonian University Press"}},{"name":"url","value":{"title":"Royal Economic Society","url":"/core/publications/publishing-partners/royal-economic-society","ariaLabel":"Royal Economic Society"}},{"name":"url","value":{"title":"Unisa Press","url":"/core/publications/publishing-partners/unisa-press","ariaLabel":"Unisa Press"}},{"name":"url","value":{"title":"The University of Adelaide Press","url":"/core/publications/publishing-partners/university-adelaide-press","ariaLabel":"The University of Adelaide Press"}},{"name":"url","value":{"title":"Wits University Press","url":"/core/publications/publishing-partners/wits-university-press","ariaLabel":"Wits University Press"}}],"slug":"partners"}}]}},{"name":"navigationBarCategory","value":{"title":"Services","navigationBarArea":[{"name":"navigationBarArea","value":{"title":"About","hubPage":{"name":"staticPage","value":{"pageTitle":"About","pageBlurb":"Cambridge Core is the home of academic content from Cambridge University Press. Built with our users in mind our online platform has been designed to help readers and researchers to make fast and easy journeys to a vast range of valuable content. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"about","introTitle":"About","introText":"Cambridge Core is the home of academic content from Cambridge University Press. Built with our users in mind our online platform has been designed to help readers and researchers to make fast and easy journeys to a vast range of valuable content.","contentBlocks":[],"areas":["about"]}},"column1Heading":"About Cambridge Core","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"About","pageBlurb":"Cambridge Core is the home of academic content from Cambridge University Press. Built with our users in mind our online platform has been designed to help readers and researchers to make fast and easy journeys to a vast range of valuable content. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"about","introTitle":"About","introText":"Cambridge Core is the home of academic content from Cambridge University Press. 
Built with our users in mind our online platform has been designed to help readers and researchers to make fast and easy journeys to a vast range of valuable content.","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"Accessibility","pageBlurb":"We want everyone who visits Cambridge Core to feel welcome and find the experience rewarding.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"accessibility","introTitle":"Accessibility","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"CrossMark policy","pageBlurb":"By applying the CrossMark logo, Cambridge University Press is committing to maintaining the content it publishes and to alerting readers to changes if and when they occur. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"crossmark-policy","introTitle":"CrossMark policy","introText":"CrossMark is a multi-publisher initiative to provide a standard way for readers to locate the current version of a piece of content. By applying the CrossMark logo, Cambridge University Press is committing to maintaining the content it publishes and to alerting readers to changes if and when they occur. Clicking on the CrossMark logo will tell you the current status of a document and may also give you additional publication record information about the document.","areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"Ethical Standards","pageBlurb":"As a leading publisher of scholarly journals and books, Cambridge University Press is committed to meeting high standards of ethical behaviour at all stages of the publication process. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"ethical-standards","introTitle":"Ethical Standards","contentBlocks":[],"areas":["about"]}}],"column2Heading":"Environment and sustainability","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Environment and sustainability","pageBlurb":"At Cambridge, we publish research and share knowledge that informs, educates, and inspires others to drive positive environmental change.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"environment-and-sustainability","introTitle":"Environment and sustainability","introText":"At Cambridge, we publish research and share knowledge that informs, educates, and inspires others to drive positive environmental change.","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"Reducing print","pageBlurb":"As a department of the University of Cambridge, we are committed to reducing our carbon footprint. A significant reduction to the print component of our journals publishing activity will be a key element of realising this commitment. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"reducing-print","introTitle":"Reducing print","introText":"As a department of the University of Cambridge, we are committed to reducing our carbon footprint. A significant reduction to the print component of our journals publishing activity will be a key element of realising this commitment. 
","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"Journals moving to online only","pageBlurb":"As we work to reduce print in our journal publishing, more of our journals are moving to online only.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"journals-moving-to-online-only","introTitle":"Journals moving to online only","introText":"As we work to reduce print in our journal publishing, more of our journals are moving to online only.","contentBlocks":[],"areas":["about"]}}],"column3Heading":"Guides","column3StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"User guides","pageBlurb":"Below you will find a range of video user guides to help you navigate and use accounts on Cambridge Core.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"user-guides","introTitle":"User guides","introText":"Below you will find a range of video user guides to help you navigate and use accounts on Cambridge Core.","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"User Guides and Videos","pageBlurb":"The content in the page offers video guidance to help you make full use of the features available on Cambridge Core.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"user-guides-and-videos","introTitle":"User Guides and Videos","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"Support Videos","pageBlurb":"For helpful content to support your Transformative Agreement for librarians and administrators, check out our LIVE WORKSHOP recording and slide deck below.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","slug":"support-videos","introTitle":"Support Videos","contentBlocks":[],"areas":["about"]}},{"name":"staticPage","value":{"pageTitle":"Training","pageBlurb":"Welcome to Training Services for Cambridge Core. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"training","introTitle":"Training","introText":"Welcome to Training Services for Cambridge Core! ","contentBlocks":[],"areas":["about"]}}],"column4Heading":"Help","column4StaticPagesOrUrls":[{"name":"url","value":{"title":"Cambridge Core help","url":"https://corehelp.cambridge.org/","ariaLabel":"Cambridge Core help pages"}},{"name":"url","value":{"title":"Contact us","url":"https://corehelp.cambridge.org/hc/en-gb/p/contact-information","ariaLabel":"This will take you to a contact form for technical support","pageBlurb":"Visit our knowledge bases and contact our Technical Support team for direct help and support with any technical issues or questions you may have","thumbnailImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}}}},{"name":"url","value":{"title":"Technical support","url":"https://corehelp.cambridge.org/hc/en-gb/requests/new","ariaLabel":"Technical support diagnostics report"}}],"slug":"about"}},{"name":"navigationBarArea","value":{"title":"Agents","hubPage":{"name":"staticPage","value":{"pageTitle":"Services for agents","pageBlurb":"Cambridge University Press is pleased to work with subscription agents in every country, whether those agents are multinational or specialised local companies.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"services-for-agents","introTitle":"Services for agents","contentBlocks":[],"areas":["agents"]}},"column1Heading":"Services for agents","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Services for agents","pageBlurb":"Cambridge University Press is pleased to work with subscription agents in every country, whether those agents are multinational or specialised local companies.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"services-for-agents","introTitle":"Services for agents","contentBlocks":[],"areas":["agents"]}},{"name":"staticPage","value":{"pageTitle":"Journals for agents","pageBlurb":"Cambridge is pleased to work with subscription agents in every country, whether those agents are multinational or specialised local companies. We now offer full content access to all Cambridge journals on Cambridge Core, including back issues.","linkImage":{"title":"GettyImages-2148427272","description":"A row of books on a metal, library style, bookshelf. the books are view from the rear and a hand is reaching and picking a book.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1BYMYdALXI4YZz9sJKegO1/0c50d6ede207418c796a9b5691b5c20a/GettyImages-2148427272.jpg","details":{"size":185617,"image":{"width":1536,"height":1024}},"fileName":"GettyImages-2148427272.jpg","contentType":"image/jpeg"}},"altText":"A row of books on a metal, library style, bookshelf. the books are view from the rear and a hand is reaching and picking a book.","slug":"journals-for-agents","introTitle":"Journals for agents","introText":"Cambridge is pleased to work with subscription agents in every country, whether those agents are multinational or specialised local companies. 
We now offer full content access to all Cambridge journals on Cambridge Core, including access to back issues.","contentBlocks":[],"areas":["agents"]}},{"name":"staticPage","value":{"pageTitle":"Books for agents","pageBlurb":"There are a number of different ebook purchasing options available on Cambridge Core to suit all your needs.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"books-for-agents","introTitle":"Books for agents","introText":"There are a number of different ebook purchasing options available on Cambridge Core to suit all your needs.","contentBlocks":[],"areas":["agents"]}},{"name":"staticPage","value":{"pageTitle":"Price list","pageBlurb":"Cambridge University Press 2025 price lists for journals, books and Elements. For subscription agents and academic institutions.","linkImage":{"title":"GettyImages-2148427272","description":"A row of books on a metal, library style, bookshelf. the books are view from the rear and a hand is reaching and picking a book.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1BYMYdALXI4YZz9sJKegO1/0c50d6ede207418c796a9b5691b5c20a/GettyImages-2148427272.jpg","details":{"size":185617,"image":{"width":1536,"height":1024}},"fileName":"GettyImages-2148427272.jpg","contentType":"image/jpeg"}},"altText":"A row of books on a metal, library style, bookshelf. 
the books are view from the rear and a hand is reaching and picking a book.","slug":"price-list","introTitle":"Price list","introText":"Cambridge University Press 2025 price lists for journals, books and Elements. For subscription agents and academic institutions.","contentBlocks":[],"areas":["librarians","agents"]}}],"slug":"agents"}},{"name":"navigationBarArea","value":{"title":"Authors","hubPage":{"name":"staticPage","value":{"pageTitle":"Authors","pageBlurb":"Cambridge University Press works closely with the global academic community to deliver the highest quality, peer-reviewed content, as well as a portfolio of innovative tools and services to advance learning and research.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"authors","introTitle":"Authors","introText":"Cambridge University Press works closely with the global academic community to deliver the highest quality, peer-reviewed content, as well as a portfolio of innovative tools and services to advance learning and research.","contentBlocks":[],"areas":[]}},"column1Heading":"Journals","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Journals","pageBlurb":"Cambridge University Press is proud to publish many of the world's leading journals across a wide range of subject areas in the humanities, social sciences and STM fields. 
We currently publish more than 400 peer-reviewed academic journals.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"journals","introTitle":"Journals","introText":"Cambridge University Press is proud to publish many of the world's leading journals across a wide range of subject areas in the humanities, social sciences and STM fields. We currently publish more than 400 peer-reviewed academic journals.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Journal publishing statistics","pageBlurb":"During 2022 we are updating our journal home pages to provide new statistics on each journal's publishing activity. On this page we provide some information about how these statistics are calculated.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"journal-publishing-statistics","introTitle":"Journal publishing statistics","introText":"During 2022 we are updating our journal home pages to provide new statistics on each journal's publishing activity. On this page we provide some information about how these statistics are calculated.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Corresponding author","pageBlurb":"The Corresponding Author is the person who handles the manuscript and correspondence during the publication process, including approving the article proofs.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"corresponding-author","introTitle":"Corresponding author","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Seeking permission to use copyrighted material","pageBlurb":"If your article contains any material in which you do not own copyright, including figures, charts, tables, photographs or excerpts of text, you must obtain permission from the copyright holder to reuse that material.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"seeking-permission-to-use-copyrighted-material","introTitle":"Seeking permission to use copyrighted material","introText":"If your article contains any material in which you do not own copyright, including figures, charts, tables, photographs or excerpts of text, you must obtain permission from the copyright holder to reuse that material. As the author it is your responsibility to obtain this permission and pay any related fees, and you will need to send us a copy of each permission statement at acceptance.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Publishing supplementary material","pageBlurb":"Supplementary materials are hosted online with the main article, and can include data sets, video files, sound clips, figures or tables.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"publishing-supplementary-material","introTitle":"Publishing supplementary material","introText":"Many of our journals encourage authors to submit and publish supplementary materials that are not essential for inclusion or cannot be accommodated in the main article, but would be of benefit to the reader. Supplementary materials are hosted online with the main article, and can include data sets, video files, sound clips, figures or tables. The main text of the article should stand alone without the supplementary material.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Writing an effective abstract","pageBlurb":"The title, abstract, and keywords you select for your manuscript play an important part in the discovery of your article after publication. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"writing-an-effective-abstract","introTitle":"Writing an effective abstract","introText":"The title, abstract, and keywords you select for your manuscript play an important part in the discovery of your article after publication. 
Since many researchers rely on search engines such as Google to find content relevant to their field, the careful selection of keywords in all of these can have a large impact on the life of an article, extending from readership through citation.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Journal production - FAQs","pageBlurb":"If you have any questions on the production of your article check out our frequently asked questions.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"journal-production-faqs","introTitle":"Journal production - FAQs","introText":"Please click on a question below to go directly to the answer. If your question is not covered here, please direct it to your chosen journal’s editor or content manager.","contentBlocks":[],"areas":["authors"]}}],"column2Heading":"Journals (cont.)","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Author affiliations","pageBlurb":"Author affiliations FAQs. A guide to determining, submitting and the display of your affiliations as a Cambridge journal author.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"author-affiliations","introTitle":"Author affiliations","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Co-reviewing policy","pageBlurb":"In journals that allow co-reviewing, an invited reviewer can work with a more junior colleague to review a manuscript for the purpose of reviewer training.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"co-reviewing-policy","introTitle":"Co-reviewing policy","introText":"In journals that allow co-reviewing, an invited reviewer can work with a more junior colleague to review a manuscript for the purpose of reviewer training. 
This allows the co-reviewer to gain experience with the review process and become a viable reviewer for a journal.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Digital Author Publishing Agreement - FAQs","pageBlurb":"Find out more about our digital author publishing agreements by checking out our frequently asked questions. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"digital-author-publishing-agreement-faqs","introTitle":"Digital Author Publishing Agreement FAQs","introText":"Click on a question below to see the corresponding answer. If your question is not covered here, please direct it to our author contracts team.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Anonymising your manuscript","pageBlurb":"To maintain this anonymity, authors will need to remove any details that may reveal their identity from their manuscript, before it is reviewed.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"anonymising-your-manuscript","introTitle":"Anonymising your manuscript","introText":"Some of our journals use a double-anonymous peer review process, meaning neither the author nor the reviewers know the identity of each other. To maintain this anonymity, authors will need to remove any details that may reveal their identity from their manuscript, before it is reviewed.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Publishing open access","pageBlurb":"Publishing your research Gold Open Access (Gold OA) helps to advance discovery by allowing anyone, anywhere to find, read, and benefit from your research.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"publishing-open-access","introTitle":"Publishing open access","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Converting your article to open access","pageBlurb":"Find information about publishing your article as open access","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"converting-your-article-to-open-access","introTitle":"Converting your article to open access","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Publishing Open Access - webinars","pageBlurb":"This session focuses on the basics of Open Access (OA) and presents evidence of the increased impact of choosing this option.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","slug":"publishing-open-access-webinars","introTitle":"Publishing Open Access - webinars","contentBlocks":[],"areas":["authors"]}}],"column3Heading":"Journals (cont.)","column3StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Preparing and submitting your paper","pageBlurb":"Each journal published by Cambridge University Press is unique, pursuing a specific set of editorial aims.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"preparing-and-submitting-your-paper","introTitle":"Preparing and submitting your paper","introText":"Each journal published by Cambridge University Press is unique, pursuing a specific set of editorial aims. We strongly encourage authors to read the Instructions for Contributors document associated with their chosen journal as early as possible during the manuscript preparation process, to ensure full understanding of the journal's audience and scope. This document can be found via the homepage of each journal on Cambridge Core.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Publishing an accepted paper","pageBlurb":"Once a paper is accepted by a journal's editor(s) it will be sent to Cambridge University Press to be prepared for publication. 
Efficiency, accuracy and quality are at the heart of this process.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"publishing-an-accepted-paper","introTitle":"Publishing an accepted paper","introText":"Once a paper is accepted by a journal's editor(s) it will be sent to Cambridge University Press to be prepared for publication. Efficiency, accuracy and quality are at the heart of this process. Authors will have access to a dedicated content manager, who will be available to answer questions and offer support as their article progresses through the steps towards publication.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Promoting your published paper","pageBlurb":"Cambridge University Press is committed to making sure your paper reaches a broad international audience in order to maximise its scholarly impact. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"promoting-your-published-paper","introTitle":"Promoting your published paper","introText":"Cambridge University Press is committed to making sure your paper reaches a broad international audience in order to maximise its scholarly impact. Our marketing team runs regular promotions through a wide variety of marketing channels and Cambridge Core is designed to make your article easy to discover, access, read and cite.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Measuring impact","pageBlurb":"This page explains the most common metrics used within scholarly publishing to measure impact at a journal level.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"measuring-impact","introTitle":"Measuring impact","introText":"This page explains the most common metrics used within scholarly publishing to measure impact at a journal level.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Journals artwork guide","pageBlurb":"This guide will explain how to effectively prepare your artwork for electronic submission to our journals. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"journals-artwork-guide","introTitle":"Journals artwork guide","introText":"Submitting your illustrations, pictures and other artwork (such as multimedia and supplementary files) in an electronic format helps us produce your work to the best possible standards, ensuring accuracy, clarity, accessibility, and a high level of detail.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Using ORCID","pageBlurb":"ORCID is a not-for-profit organization governed by an elected board. It provides a persistent digital identifier (an ORCID ID) for individual researchers.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"using-orcid","introTitle":"Using ORCID","introText":"ORCID is a not-for-profit organization governed by an elected board. 
It provides a persistent digital identifier (an ORCID ID) for individual researchers, and a registry of those identifiers, to support automated linkages between researchers and their professional activities and affiliations, through integration in research workflows such as manuscript and grant submission.","contentBlocks":[],"areas":["authors"]}}],"column4Heading":"Books","column4StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Books","pageBlurb":"Cambridge University Press works closely with the global academic community to deliver the highest quality, peer-reviewed content, as well as a portfolio of innovative tools and services to advance learning and research.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"books","introTitle":"Books","introText":"Cambridge University Press works closely with the global academic community to deliver the highest quality, peer-reviewed content, as well as a portfolio of innovative tools and services to advance learning and research. 
Together with Cambridge University, our mission is to advance the work of researchers, students, lecturers, and librarians worldwide.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Marketing your book","pageBlurb":"Cambridge provides marketing support across all relevant channels and will work closely with your Editor and the Sales team to ensure that your book reaches its target audience.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"marketing-your-book","introTitle":"Marketing your book","introText":"You will be assigned a dedicated Marketing Specialist who will be in touch soon to discuss the marketing plans for your book. They will provide marketing support across all relevant channels and will work closely with your Editor and the Sales team to ensure that your book reaches its target audience.","contentBlocks":[],"areas":["authors"]}},{"name":"staticPage","value":{"pageTitle":"Author guides for Cambridge Elements","pageBlurb":"Cambridge Elements user guides. Author guides, FAQs and style guides: find all the information about writing Cambridge Elements here.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"elements-user-guides","introTitle":"User guides for Cambridge Elements","introText":"Cambridge Elements user guides. Author guides, FAQs and style guides: find all the information about writing Cambridge Elements here.","contentBlocks":[],"areas":["authors"]}}],"slug":"authors"}},{"name":"navigationBarArea","value":{"title":"Corporates","hubPage":{"name":"staticPage","value":{"pageTitle":"Services for corporates","pageBlurb":"Discover more about commercial reprints, advertising, sponsorship, special sales, foreign rights and permissions.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"services-for-corporates","introTitle":"Services for corporates","contentBlocks":[],"areas":[]}},"column1Heading":"Corporates","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Commercial reprints","pageBlurb":"Providing hard copy and electronic reprints of articles in the majority of our journals. 
Our commercial sales team can provide quotes for permission to translate articles and the reuse of our journal content for a corporate client.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"commercial-reprints","introTitle":"Commercial reprints","introText":"Cambridge University Press provides hard copy and electronic reprints of articles in the majority of our journals. Our commercial sales team can also provide quotes for permission to translate articles and any other permission that involves the reuse of our journal content for a corporate client.","contentBlocks":[],"areas":["corporates"]}},{"name":"staticPage","value":{"pageTitle":"Advertising","pageBlurb":"With over 400 titles to choose from, our extensive list of journals – spanning 45 subject areas – means you will always reach your target audience.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"advertising","introTitle":"Advertising","introText":"With over 400 titles to choose from, our extensive list of journals – spanning 45 subject areas – means you will always reach your target audience.","contentBlocks":[],"areas":["corporates"]}},{"name":"staticPage","value":{"pageTitle":"Sponsorship","pageBlurb":"Sponsorship of journals, supplements, special or themed issues, collections and questions allows your organisation to work with Cambridge University Press Publishing.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"sponsorship","introTitle":"Sponsorship","introText":"Sponsorship of journals, supplements, special or themed issues, collections and questions allows your organisation to work with Cambridge University Press Publishing.","contentBlocks":[],"areas":["corporates"]}},{"name":"staticPage","value":{"pageTitle":"Book special sales","pageBlurb":"We offer a range of flexible print and digital customisation options to suit your organization’s needs.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"book-special-sales","introTitle":"Book special sales","introText":"We offer a range of flexible print and digital customisation options to suit your organization’s needs, working with not-for-profit organisations, educational institutions, corporate customers and pharmaceutical companies to creating co-branded editions with our partners.","contentBlocks":[],"areas":["corporates"]}},{"name":"staticPage","value":{"pageTitle":"Contact us","pageBlurb":"Contact Cambridge University Press for Book special sales, advertising, sponsorship and commercial reprints.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"contact-us","introTitle":"Contact us","introText":"Contact Cambridge University Press for Book special sales, advertising, sponsorship and commercial reprints.","contentBlocks":[],"areas":["corporates"]}}],"slug":"corporates"}},{"name":"navigationBarArea","value":{"title":"Editors","hubPage":{"name":"staticPage","value":{"pageTitle":"Editors","pageBlurb":"EditorTo empower our editors as advocates we provide dedicated tools, support and training throughout the publishing process, to raise journal visibility and cement its reputation in the field.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"editors","introTitle":"Editors","introText":"Editors make important contributions to the research community and ensure their journals feature work of the highest calibre. To empower our editors as advocates we provide dedicated tools, support and training throughout the publishing process, to raise journal visibility and cement its reputation in the field.","contentBlocks":[],"areas":[]}},"column1Heading":"Information","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Journal development","pageBlurb":"The editor is the journal’s ambassador, here are some of the ways to support the journal's development.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"journal-development","introTitle":"Journal development","introText":"Supporting the journal. The editor is the journal’s ambassador, here are some of the ways to support the journal's development:","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Peer review for editors","pageBlurb":"An Online Peer Review System (OPRS), works to help editors carry out the peer review process. A submission is anonymised before being uploaded to allow for unbiased assessment. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"peer-review-for-editors","introTitle":"Peer review for editors","introText":"An Online Peer Review System (OPRS), works to help editors carry out the peer review process. A submission is anonymised before being uploaded to allow for unbiased assessment. 
","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Open access for editors","pageBlurb":"Open access (OA) is an important way to make research findings freely available for anyone to access and view.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"open-access-for-editors","introTitle":"Open access for editors","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Policies and guidelines","pageBlurb":"At Cambridge University Press, the integrity of our academic content and publishing process is paramount, and as our editors, you are our partners in supporting this mission. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"policies-and-guidelines","introTitle":"Policies and guidelines","introText":"At Cambridge University Press, the integrity of our academic content and publishing process is paramount, and as our editors, you are our partners in supporting this mission. ","contentBlocks":[],"areas":["editors"]}}],"column2Heading":"Resources","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"The editor's role","pageBlurb":"Our editors ensure that their journals continue to evolve and have an impact on their research field.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"the-editors-role","introTitle":"The editor's role","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Open research for editors","pageBlurb":"Open research refers to a range of practices that are dramatically improving how researchers are publishing and sharing their work.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","slug":"open-research-for-editors","introTitle":"Open research for editors","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Engagement and promotion","pageBlurb":"There are a number of ways an editor can support the visibility and dissemination of their journal(s).","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"engagement-and-promotion","introTitle":"Engagement and promotion","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Blogging","pageBlurb":"Blogs are great for profiling new and exciting research or features in the journal. It is also a key way to attract search traffic.","linkImage":{"title":"A Guide to Blogging","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/5sLUiDKTGCXxU9GKcPcHSA/36bf1c9ca78ae1e2a47293d27fb32647/A_Guide_to_Blogging.png","details":{"size":56411,"image":{"width":1824,"height":894}},"fileName":"A Guide to Blogging.png","contentType":"image/png"}},"altText":"Editor Resources A Guide to Blogging","slug":"blogging","introTitle":"Blogging","introText":"Blogs are great for profiling new and exciting research or features in the journal. 
It is also a key way to attract search traffic.","contentBlocks":[],"areas":["editors"]}},{"name":"staticPage","value":{"pageTitle":"Social media","pageBlurb":"Social media is a great way to raise the profile and visibility of your journal. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"social-media","introTitle":"Social media","introText":"Social media is a great way to raise the profile and visibility of your journal. ","contentBlocks":[],"areas":["editors"]}}],"slug":"editors"}},{"name":"navigationBarArea","value":{"title":"Librarians","hubPage":{"name":"staticPage","value":{"pageTitle":"Librarians","pageBlurb":"Cambridge Core has primarily been designed to help your readers and researchers to make fast and easy journeys to valuable content.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","slug":"librarians","introTitle":"Librarians","contentBlocks":[],"areas":[]}},"column1Heading":"Information","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open Access for Librarians","pageBlurb":"Welcome to your librarian hub for Open Access at Cambridge University Press. Open Access (OA) can be quite daunting, so we've created this page to help you navigate our different resources.","linkImage":{"title":"OA Librarian Website Header Banner","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/6eyfpqZUi4KEeOt7lsGKJg/41ef6beae2eee1a8304a95313aa08b64/OA-Librarian-Website-Header-Banner-Image-1-1500x300.jpg","details":{"size":348959,"image":{"width":1500,"height":300}},"fileName":"OA-Librarian-Website-Header-Banner-Image-1-1500x300.jpg","contentType":"image/jpeg"}},"altText":"Open Access for Librarians: get all your information about OA from Cambridge","slug":"open-access-for-librarians","introTitle":"Open Access for Librarians","introText":"Welcome to your librarian hub for Open Access at Cambridge University Press. Open Access (OA) can be quite daunting, so we've created this page to help you navigate our different resources. Learn about our OA policies, what we publish OA and how you can support your institution when publishing OA journals, books, monographs and Elements.","contentBlocks":[],"areas":["librarians"]}},{"name":"url","value":{"title":"Transformative agreements","url":"https://www.cambridge.org/core/services/open-access-policies/read-and-publish-agreements","ariaLabel":"Transformative agreements"}},{"name":"staticPage","value":{"pageTitle":"Transformative Agreements - FAQs","pageBlurb":"A transformative agreement is the combined provision of two services for one cost: OA publishing and reading access to all subscription journals.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"transformative-agreements-faqs","introTitle":"Transformative Agreements - FAQs","introText":"A transformative agreement is the combined provision of two services for one cost: OA publishing and reading access to all subscription journals.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Evidence based acquisition","pageBlurb":"Academic institutions can access Cambridge University Press ebook collections via an EBA - access more than 50,000 titles published by Cambridge and our publishing partners, before making decisions on which titles to buy in perpetuity. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"evidence-based-acquisition","introTitle":"Evidence based acquisition","introText":"Academic institutions can now access Cambridge University Press ebook collections via an EBA. 
Under this model, institutions are given access to an extensive online collection of more than 50,000 titles published by Cambridge and our publishing partners, before making decisions on which titles to buy in perpetuity. ","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"ebook news & updates","pageBlurb":"Welcome to your Cambridge University Press ebooks hub. This is your single reference page where you can learn the latest news on ebooks and Cambridge Core platform updates. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"ebook-news-and-updates","introTitle":"ebook news & updates","introText":"Welcome to your Cambridge University Press ebooks hub. This is your single reference page where you can learn the latest news on ebooks and Cambridge Core platform updates. ","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Cambridge libraries of the world podcast","pageBlurb":"A brand-new podcast series for librarians to listen to and learn about topics at the heart of academic discourse, via interviews conducted with professors, researchers, students, librarians, and Cambridge personnel.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"cambridge-libraries-of-the-world-podcast","introTitle":"Cambridge libraries of the world podcast","introText":"A brand-new podcast series for librarians to listen to and learn about topics at the heart of academic discourse, via interviews conducted with professors, researchers, students, librarians, and Cambridge personnel. ","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Purchasing models","pageBlurb":"Discover more about the purchasing options available to institutions for the content hosted on Cambridge Core.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"purchasing-models","introTitle":"Purchasing models","introText":"Below are a number of different purchasing options available to institutions for the content hosted on Cambridge Core.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Journals Publishing Updates","pageBlurb":"Another year of development for Cambridge University Press's journals publishing sees big changes coming for 2025.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"journals-publishing-updates","introTitle":"Journals Publishing Updates","introText":"Another year of development for Cambridge University Press's journals publishing sees big changes coming for 2025. Below is a preview of our list changes, including our new titles, titles that are converting to an open access publishing model, and details of our new series.","contentBlocks":[],"areas":["librarians"]}}],"column2Heading":"Products","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Cambridge frontlist","pageBlurb":"Our 2024 ebook collection contains the most recent, award-winning publishing from Cambridge University Press.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"cambridge-frontlist","introTitle":"Cambridge frontlist","introText":"Our 2024 ebook collection contains the most recent, award-winning publishing from Cambridge University Press.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Cambridge journals digital archive","pageBlurb":"The Cambridge journals digital archive provides instant online access to two centuries of academic excellence and publishing history from Cambridge University Press. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"cambridge-journals-digital-archive","introTitle":"Cambridge journals digital archive","introText":"The Cambridge journals digital archive provides instant online access to two centuries of academic excellence and publishing history from Cambridge University Press. 
Our constantly growing digital archive takes knowledge off the shelf and makes it readily available online, alongside our current Cambridge Core content.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Hot topics","pageBlurb":"Hot Topics from Cambridge are a series of bespoke ebook collections reflecting the latest research trends.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"hot-topics","introTitle":"Hot topics","introText":"Hot Topics from Cambridge are a series of bespoke ebook collections comprising titles from across a wide range of subjects, created to reflect the latest research trends. Drawing from the humanities and social sciences, in addition to the science, technology and medical disciplines, the collections highlight Cambridge’s depth of publishing.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Other digital products","pageBlurb":"Cambridge University Press publishes a wide variety of academic content online, some of which currently sits on other platforms.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"other-digital-products","introTitle":"Other digital products","introText":"Cambridge University Press publishes a wide variety of academic content online, some of which currently sits on other platforms.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Perpetual access products","pageBlurb":"Gain perpetual access to Cambridge University Press resources for a one-time payment. Details of products and offers can be found on this page.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"perpetual-access-products","introTitle":"Perpetual access products","introText":"Gain perpetual access to Cambridge University Press resources for a one-time payment. Details of products and offers can be found on this page.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Price list","pageBlurb":"Cambridge University Press 2025 price lists for journals, books and Elements. 
For subscription agents and academic institutions.","linkImage":{"title":"GettyImages-2148427272","description":"A row of books on a metal, library style, bookshelf. The books are viewed from the rear and a hand is reaching and picking a book.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1BYMYdALXI4YZz9sJKegO1/0c50d6ede207418c796a9b5691b5c20a/GettyImages-2148427272.jpg","details":{"size":185617,"image":{"width":1536,"height":1024}},"fileName":"GettyImages-2148427272.jpg","contentType":"image/jpeg"}},"altText":"A row of books on a metal, library style, bookshelf. The books are viewed from the rear and a hand is reaching and picking a book.","slug":"price-list","introTitle":"Price list","introText":"Cambridge University Press 2025 price lists for journals, books and Elements. For subscription agents and academic institutions.","contentBlocks":[],"areas":["librarians","agents"]}},{"name":"staticPage","value":{"pageTitle":"Developing country programme","pageBlurb":"Cambridge University Press has been working to improve the lives of people living in less economically developed parts of the world for nearly 500 years.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"developing-country-programme","introTitle":"Developing country programme","introText":"Cambridge University Press has been working to improve the lives of people living in less economically developed parts of the world for nearly 500 years.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"New content","pageBlurb":"Discover new books and journals published by Cambridge University Press on Cambridge Core. Including information on titles moving to online only.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"new-content","introTitle":"New content","introText":"Discover new books and journals published by Cambridge University Press on Cambridge Core. 
Including information on titles moving to online only.","contentBlocks":[],"areas":["librarians"]}}],"column3Heading":"Tools","column3StaticPagesOrUrls":[{"name":"url","value":{"title":"Eligibility checker","url":"/core/eligibility-checker","ariaLabel":"Eligibility checker"}},{"name":"url","value":{"title":"Transformative agreements","url":"https://www.cambridge.org/core/services/open-access-policies/read-and-publish-agreements","ariaLabel":"Transformative agreements"}},{"name":"url","value":{"title":"KBART","url":"https://www.cambridge.org/core/services/librarians/kbart","ariaLabel":"KBART"}},{"name":"url","value":{"title":"MARC records","url":"https://www.cambridge.org/core/services/librarians/marc-records","ariaLabel":"MARC records"}},{"name":"staticPage","value":{"pageTitle":"Using MARCEdit for MARC records","pageBlurb":"Using MARCEdit for MARC records. To open/use MARC records from Cambridge Core, we recommend you use MARCEdit.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"using-marcedit-for-marc-records","introTitle":"Using MARCEdit for MARC records","introText":"Using MARCEdit for MARC records. 
To open/use MARC records from Cambridge Core, we recommend you use MARCEdit.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Inbound OpenURL specifications","pageBlurb":"Specifications to link directly to an article, book, chapter or journal page on Cambridge Core using an OpenURL link.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"inbound-openurl-specifications","introTitle":"Inbound OpenURL specifications","introText":"Use the following specifications to link directly to an article, book, chapter or journal page on Cambridge Core using an OpenURL link.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"COUNTER report types","pageBlurb":"A librarian guide to COUNTER reporting and report types, as well as turnaway reports for books and journals.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"counter-report-types","introTitle":"COUNTER report types","introText":"A librarian guide to COUNTER reporting and report types, as well as turnaway reports for books and journals.","contentBlocks":[],"areas":["librarians"]}}],"column4Heading":"Resources","column4StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Catalogues and resources","pageBlurb":"Discover resources that have been created to help you get the most value out of content on Cambridge Core. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"catalogues-and-resources","introTitle":"Catalogues and resources","introText":"These resources have been created to help you get the most value out of content on Cambridge Core. By promoting your Cambridge publications, you can increase usage and help readers discover new titles.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Making the most of your EBA","pageBlurb":"Evidence Based Acquisition (EBA) is an ebook purchase model that allows you to make informed decisions about which titles to own based on usage data.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"making-the-most-of-your-eba","introTitle":"Making the most of your EBA","introText":"Evidence Based Acquisition (EBA) is an ebook purchase model that allows you to make informed decisions about which titles to own based on usage data. We want to make sure that your EBA with Cambridge is a success, so we’ve put together a selection of resources to help with both raising awareness and ensuring discoverability of content.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Posters","pageBlurb":"Below are all posters for ebooks and journals available for you to download in two sizes - A3 and A4.","linkImage":{"title":"Posters","description":"An image showing the walkway between bookshelves in a library. The word \"Posters\" is superimposed over. ","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/3gpLCEAqNNQ89VoZWA6Aot/6ee8948bc1bd70553b19070431fc0818/Posters.jpg","details":{"size":64672,"image":{"width":424,"height":305}},"fileName":"Posters.jpg","contentType":"image/jpeg"}},"altText":"An image showing the walkway between bookshelves in a library. The word \"Posters\" is superimposed over. 
","slug":"posters","introTitle":"Posters","introText":"Below are all posters for ebooks and journals available for you to download in two sizes - A3 and A4.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Leaflets and brochures","pageBlurb":"View and download our latest leaflets, brochures and catalogues, for products and resources, on Cambridge Core.","linkImage":{"title":"leaflets-and-brochures","description":"A close up of fingertips holding a ballpoint pen. The pen hovers about a notepad. Superimposed are the words \"Leaflets & Brochures\".","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/2vuFy7rj3ER6B7sH4iCAQW/f6ba1d6a1e00c2dbbf6e36e83a082e22/leaflets-and-brochures.jpg","details":{"size":42247,"image":{"width":424,"height":305}},"fileName":"leaflets-and-brochures.jpg","contentType":"image/jpeg"}},"altText":"A close up of fingertips holding a ballpoint pen. The pen hovers about a notepad. Superimposed are the words \"Leaflets & Brochures\".","slug":"leaflets-and-brochures","introTitle":"Leaflets and brochures","introText":"View and download our latest leaflets, brochures and catalogues, for products and resources, on Cambridge Core.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Additional resources","pageBlurb":"Download the Cambridge Core desktop background and logo to use on your library screens and in publicity material.","linkImage":{"title":"Additional-resources","description":"A close up of hands typing on a computer keyboard. Superimposed over the image are the words \"Additional Resources\". ","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7IT9biHnN2eHG5WfwB7zmU/5cf73c7ade25233f31c709e359b2f78c/Additional-resources.jpg","details":{"size":39706,"image":{"width":424,"height":305}},"fileName":"Additional-resources.jpg","contentType":"image/jpeg"}},"altText":"A close up of hands typing on a computer keyboard. 
Superimposed over the image are the words \"Additional Resources\". ","slug":"additional-resources","introTitle":"Additional resources","introText":"Download the Cambridge Core desktop background and logo to use on your library screens and in publicity material.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Find my sales contact","pageBlurb":"Please find your sales contacts below by the appropriate region. Alternatively you can send general enquiries.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"find-my-sales-contact","introTitle":"Find my sales contact","introText":"Please find your sales contacts below by the appropriate region. Alternatively you can send general enquiries.","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Webinars","pageBlurb":"The author publishing workflow and author outreach activities. How to approve articles using Rightslink Agreement Manager. How to analyse reporting from Cambridge University Press.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"webinars","introTitle":"Webinars","contentBlocks":[],"areas":["librarians"]}},{"name":"staticPage","value":{"pageTitle":"Read and publish resources","pageBlurb":"This page is dedicated to providing resources, messaging and training to help make your deal a success.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"read-and-publish-resources","introTitle":"Read and publish resources","contentBlocks":[],"areas":["librarians"]}}],"slug":"librarians"}},{"name":"navigationBarArea","value":{"title":"Peer review","hubPage":{"name":"staticPage","value":{"pageTitle":"Peer review","pageBlurb":"Peer review is the foundation of quality in research for both books and journals, ensuring that published research is rigorous, ethical and significant to the discipline in question.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"peer-review","introTitle":"Peer review","introText":"Peer review is the foundation of quality in research for both books and journals, ensuring that published research is rigorous, ethical and significant to the discipline in question. On these pages you'll find information about how to review both books and journal articles, as well as some frequently asked questions, notes on ethics in review, and news from Cambridge University Press relating to developments in review.","contentBlocks":[],"areas":[]}},"column1Heading":"Peer review","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"How to peer review journal articles","pageBlurb":"Cambridge University Press has created a guide to give a practical introduction to conducting effective peer reviews, especially for those who are new to the process. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"how-to-peer-review-journal-articles","introTitle":"How to peer review journal articles","contentBlocks":[],"areas":["peer-review"]}},{"name":"staticPage","value":{"pageTitle":"How to peer review book proposals","pageBlurb":"Cambridge University Press has created a guide to give a practical introduction to conducting effective peer reviews, especially for those who are new to the process.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"how-to-peer-review-book-proposals","introTitle":"How to peer review book proposals","contentBlocks":[],"areas":["peer-review"]}},{"name":"staticPage","value":{"pageTitle":"How to peer review Registered Reports","pageBlurb":"This page offers guidance for peer reviewers about Registered Reports, a publishing format where a research article is published in two stages, each stage undergoing a separate peer review process.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","slug":"how-to-peer-review-registered-reports","introTitle":"How to peer review Registered Reports","contentBlocks":[],"areas":["peer-review"]}},{"name":"staticPage","value":{"pageTitle":"Peer review FAQs","pageBlurb":"Please note that these frequently asked questions have been written with journal articles in mind, though some may be applicable to reviewing book proposals.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"peer-review-faqs","introTitle":"Peer review FAQs","introText":"Please note that these frequently asked questions have been written with journal articles in mind, though some may be applicable to reviewing book proposals.","contentBlocks":[],"areas":["peer-review"]}},{"name":"staticPage","value":{"pageTitle":"Ethics in peer review","pageBlurb":"Cambridge University Press is committed to publishing ethics in all areas. Discover more about the five main areas of ethical considerations in peer review.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"ethics-in-peer-review","introTitle":"Ethics in peer review","introText":"Cambridge University Press is committed to publishing ethics in all areas. Discover more about the five main areas of ethical considerations in peer review.","contentBlocks":[],"areas":["peer-review"]}},{"name":"staticPage","value":{"pageTitle":"Online peer review systems","pageBlurb":"Where you complete and/or submit your review will vary depending on the journal. At Cambridge University Press, we use two online peer review systems called Scholar One and Editorial Manager, where you'll be expected to submit your review.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"online-peer-review-systems","introTitle":"Online peer review systems","introText":"Where you complete and/or submit your review will vary depending on the journal. At Cambridge University Press, we use two online peer review systems called Scholar One and Editorial Manager, where you'll be expected to submit your review. 
These systems are user-friendly and should walk you through the process of submitting your review.","contentBlocks":[],"areas":["peer-review"]}},{"name":"staticPage","value":{"pageTitle":"A guide to Publons","pageBlurb":"Cambridge University Press has created a guide to Publons to explain what it is and the benefits of using it as a reviewer.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"a-guide-to-publons","introTitle":"A guide to Publons","introText":"Publons has recently been rebranded as the Web of Science Reviewer Recognition Service during its integration with the Web of Science. ","contentBlocks":[],"areas":["peer-review"]}}],"slug":"peer-review"}},{"name":"navigationBarArea","value":{"title":"Publishing ethics","hubPage":{"name":"staticPage","value":{"pageTitle":"Publishing ethics","pageBlurb":"As a leading publisher of scholarly journals and books, Cambridge University Press is committed to meeting high standards of ethical behaviour at all stages of the publication process.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"publishing-ethics","introTitle":"Publishing ethics","introText":"As a leading publisher of scholarly journals and books, Cambridge University Press is committed to meeting high standards of ethical behaviour at all stages of the publication process. Our research publishing ethics guidelines outline the publishing ethics responsibilities of Cambridge University Press, authors, peer reviewers and editors.","contentBlocks":[],"areas":[]}},"column1Heading":"Journals ","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Publishing ethics guidelines for journals","pageBlurb":"At Cambridge University Press, the integrity of our academic content and publishing process is paramount. These guidelines outline the best practice principles that we apply to our Academic content. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"publishing-ethics-guidelines-journals","introTitle":"Publishing ethics guidelines for journals","introText":"At Cambridge University Press, the integrity of our academic content and publishing process is paramount. These guidelines outline the best practice principles that we apply to our Academic content. We hope these guidelines will be useful to many different groups, including authors, peer reviewers, editors within and outside of Cambridge University Press, societies, publishing partners and funders.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Core editorial policies for journals","pageBlurb":"This page contains information about our publishing principles, research integrity and academic freedom - as well as a number of other aspects of our editorial policies.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"core-editorial-policies-journals","introTitle":"Core editorial policies for journals","introText":"This page contains information about our publishing principles, research integrity and academic freedom - as well as a number of other aspects of our editorial policies.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Authorship and contributorship for journals","pageBlurb":"We acknowledge that different disciplines and publication formats have different norms for who is listed as an author.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"authorship-and-contributorship-journals","introTitle":"Authorship and contributorship for journals","introText":"We acknowledge that different disciplines and publication formats have different norms for who is listed as an author. We expect all authors on any content submitted to Cambridge to be in agreement that the authors listed would all be considered authors according to disciplinary norms, and that no authors who would reasonably be considered an author have been excluded. 
","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Affiliations for journals","pageBlurb":"Any affiliations should represent the institution(s) at which the research presented was conducted and/or supported and/or approved.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"affiliations-journals","introTitle":"Affiliations for journals","introText":"Any affiliations should represent the institution(s) at which the research presented was conducted and/or supported and/or approved. For non-research content, any affiliations should represent the institution(s) with which each author is currently affiliated.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Research ethics for journals","pageBlurb":"Cambridge University Press expects all contributors to align with ethics of research when researching and writing.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"research-ethics-journals","introTitle":"Research ethics for journals","introText":"Cambridge University Press expects all contributors to align with ethics of research when researching and writing.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Competing interests and funding for journals","pageBlurb":"Authors submitting a journal manuscript to Cambridge University Press, employees, the SAPC, editors and reviewers of Cambridge University Press publications, are required to declare any potential competing interests.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"competing-interests-and-funding-journals","introTitle":"Competing interests and funding for journals","introText":"Authors submitting a journal manuscript to Cambridge University Press, employees, the SAPC, editors and reviewers of Cambridge University Press publications, are required to declare any potential competing interests that could interfere with the objectivity or integrity of a publication. ","contentBlocks":[],"areas":["publishing-ethics"]}}],"column2Heading":"Journals (cont.)","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Data and supporting evidence for journals","pageBlurb":"We advocate for transparency and openness around data, code, and other materials associated with research, and we are a signatory of the Transparency and Openness Promotion (TOP) Guidelines. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"data-and-supporting-evidence-for-journals","introTitle":"Data and supporting evidence for journals","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Misconduct for journals","pageBlurb":"The principles of research integrity - honesty, transparency, accountability, care and respect - are encompassed by our core editorial policies described above.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"misconduct-journals","introTitle":"Misconduct for journals","introText":"The principles of research integrity - honesty, transparency, accountability, care and respect - are encompassed by our core editorial policies described above.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Corrections, retractions and removals for journals","pageBlurb":"Cambridge University Press policies and information regarding corrections, retractions and removals in published content.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"corrections-retractions-and-removals-journals","introTitle":"Corrections, retractions and removals for journals","introText":"Cambridge University Press policies and information regarding corrections, retractions and removals in published content.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Versions and adaptations for journals","pageBlurb":"Our publications are distributed in many different global, cultural, environmental and economic contexts. We may therefore issue different versions of some of our products in order to cater to these contexts. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"versions-and-adaptations-journals","introTitle":"Versions and adaptations for journals","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Libel, defamation and freedom of expression","pageBlurb":"Freedom of expression is critical to us as academic publishers, but we do not support publishing false statements that harm the reputation of individuals, groups or organisations. Our legal team will address allegations of libel in our publications.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"libel-defamation-and-freedom-of-expression","introTitle":"Libel, defamation and freedom of expression","introText":"Freedom of expression is critical to us as academic publishers, but we do not support publishing false statements that harm the reputation of individuals, groups or organisations. Our legal team will address allegations of libel in any of our publications.","areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Business ethics journals","pageBlurb":"Business ethics: fair access, censorship, marketing communication, advertising, sponsorship, PR / media and metrics, usage and reporting. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"business-ethics-journals","introTitle":"Business ethics journals","introText":"Business ethics: fair access, censorship, marketing communication, advertising, sponsorship, PR / media and metrics, usage and reporting. ","contentBlocks":[],"areas":["publishing-ethics"]}}],"column3Heading":"Books","column3StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Publishing ethics guidelines for books","pageBlurb":"At Cambridge University Press, the integrity of our academic content and publishing process is paramount. These guidelines outline the best practice principles that we apply to our Academic content. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"publishing-ethics-guidelines-books","introTitle":"Publishing ethics guidelines for books","introText":"At Cambridge University Press, the integrity of our academic content and publishing process is paramount. 
These guidelines outline the best practice principles that we apply to our Academic content. We hope these guidelines will be useful to many different groups, including authors, peer reviewers, editors within and outside of Cambridge University Press, societies, publishing partners and funders. ","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Core editorial policies for books","pageBlurb":"This page contains information about our publishing principles, research integrity and academic freedom - as well as a number of other aspects of our editorial policies.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"core-editorial-policies-books","introTitle":"Core editorial policies for books","introText":"This page contains information about our publishing principles, research integrity and academic freedom - as well as a number of other aspects of our editorial policies.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Authorship and contributorship for books","pageBlurb":"We acknowledge that different disciplines and publication formats have different norms for who is listed as an author.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"authorship-and-contributorship-books","introTitle":"Authorship and contributorship for books","introText":"We acknowledge that different disciplines and publication formats have different norms for who is listed as an author. We expect all authors on any content submitted to Cambridge to be in agreement that the authors listed would all be considered authors according to disciplinary norms, and that no authors who would reasonably be considered an author have been excluded.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Affiliations for books","pageBlurb":"Any affiliations should represent the institution(s) at which the research presented was conducted and/or supported and/or approved.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"affiliations-books","introTitle":"Affiliations for books","introText":"Any affiliations should represent the institution(s) at which the research presented was conducted and/or supported and/or approved. For non-research content, any affiliations should represent the institution(s) with which each author is currently affiliated.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Research ethics for books","pageBlurb":"Cambridge University Press expects all contributors to align with ethics of research when researching and writing.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"research-ethics-books","introTitle":"Research ethics for books","introText":"Cambridge University Press expects all contributors to align with ethics of research when researching and writing.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Competing interests and funding for books","pageBlurb":"Authors submitting a book or Element manuscript to Cambridge University Press, employees, the SAPC, editors and reviewers of Cambridge University Press publications, must declare any potential competing interests.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"competing-interests-and-funding-books","introTitle":"Competing interests and funding for books","introText":"Authors submitting a book or Element manuscript to Cambridge University Press, employees, the SAPC, editors and reviewers of Cambridge University Press publications, are required to declare any potential competing interests that could interfere with the objectivity or integrity of a publication. ","contentBlocks":[],"areas":["publishing-ethics"]}}],"column4Heading":"Books (cont.)","column4StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Data and supporting evidence for books","pageBlurb":"We advocate for transparency and openness around data, code, and other materials associated with research, and we are a signatory of the Transparency and Openness Promotion (TOP) Guidelines. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"data-and-supporting-evidence-books","introTitle":"Data and supporting evidence for books","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Misconduct for books","pageBlurb":"The principles of research integrity - honesty, transparency, accountability, care and respect - are encompassed by our core editorial policies described above.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"misconduct-books","introTitle":"Misconduct for books","introText":"The principles of research integrity - honesty, transparency, accountability, care and respect - are encompassed by our core editorial policies described above.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Corrections, retractions and removals for books","pageBlurb":"Cambridge University Press policies and information regarding corrections, retractions and removals in published content.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"corrections-retractions-and-removals-books","introTitle":"Corrections, retractions and removals for books","introText":"Cambridge University Press policies and information regarding corrections, retractions and removals in published content.","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Versions and adaptations for books","pageBlurb":"Our publications are distributed in many different global, cultural, environmental and economic contexts. We may therefore issue different versions of some of our products in order to cater to these contexts. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"versions-and-adaptations-books","introTitle":"Versions and adaptations for books","contentBlocks":[],"areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Libel, defamation and freedom of expression","pageBlurb":"Freedom of expression is critical to us as academic publishers, but we do not support publishing false statements that harm the reputation of individuals, groups or organisations. Our legal team will address allegations of libel in our publications.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"libel-defamation-and-freedom-of-expression","introTitle":"Libel, defamation and freedom of expression","introText":"Freedom of expression is critical to us as academic publishers, but we do not support publishing false statements that harm the reputation of individuals, groups or organisations. Our legal team will address allegations of libel in any of our publications.","areas":["publishing-ethics"]}},{"name":"staticPage","value":{"pageTitle":"Business ethics books","pageBlurb":"Business ethics: fair access, censorship, marketing communication, advertising, sponsorship, PR / media and metrics, usage and reporting. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"business-ethics-books","introTitle":"Business ethics books","introText":"Business ethics: fair access, censorship, marketing communication, advertising, sponsorship, PR / media and metrics, usage and reporting. ","contentBlocks":[],"areas":["publishing-ethics"]}}],"slug":"publishing-ethics"}},{"name":"navigationBarArea","value":{"title":"Publishing partners","hubPage":{"name":"staticPage","value":{"pageTitle":"Publishing partnerships","pageBlurb":"Over 220 world-leading societies and presses partner with us on publications including journals, book series, early research and e-books. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"publishing-partnerships","introTitle":"Publishing partnerships","contentBlocks":[],"areas":["publishing-partners"]}},"column1Heading":"Publishing partners","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Publishing partnerships","pageBlurb":"Over 220 world-leading societies and presses partner with us on publications including journals, book series, early research and e-books. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"publishing-partnerships","introTitle":"Publishing partnerships","contentBlocks":[],"areas":["publishing-partners"]}},{"name":"staticPage","value":{"pageTitle":"Partner books","pageBlurb":"Books published for our society partners are given the same high quality production as our own publications, and are published throughout the world in print and electronic formats as appropriate.","linkImage":{"title":"PartnersBooks","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/2U1gsLlZ2xJxn8N97MOIAt/2604acf1f3e6274808a94cc3e0562509/PartnersBooks.jpg","details":{"size":32409,"image":{"width":500,"height":300}},"fileName":"PartnersBooks.jpg","contentType":"image/jpeg"}},"altText":"A background of dark blue with the word 'Books' in white text. 
The background has a pattern of dots forming circles.","slug":"partner-books","introTitle":"Partner books","contentBlocks":[],"areas":["publishing-partners"]}},{"name":"staticPage","value":{"pageTitle":"eBook publishing partnerships","pageBlurb":"Publishing partner ebook titles are delivered to customers on Cambridge Core. The publisher logo also appears on each individual book page, to ensure it is always clear that the ebook is published by a publishing partner.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"ebook-publishing-partnerships","introTitle":"eBook publishing partnerships","contentBlocks":[],"areas":["publishing-partners"]}},{"name":"staticPage","value":{"pageTitle":"Journal publishing partnerships","pageBlurb":"Cambridge University Press has a proud history of forming productive and enduring publishing relationships with learned societies, universities, and professional associations.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","slug":"journal-publishing-partnerships","introTitle":"Journal publishing partnerships","introText":"Cambridge University Press has a proud history of forming productive and enduring publishing relationships with learned societies, universities, and professional associations. Our mission is to unlock people's potential with the best learning and research solutions.","contentBlocks":[],"areas":["publishing-partners"]}}],"column2Heading":"Publishing partners (cont.)","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Journals publishing","pageBlurb":"We pride ourselves on offering a truly collaborative publishing service which has journal development at its core.","linkImage":{"title":"journals publishing","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/3pWJoXbWYEFDv5DOiBQs7I/8c4dedff525b7170cfe631f4f66b88e7/journals_publishing.JPG","details":{"size":7595,"image":{"width":177,"height":155}},"fileName":"journals publishing.JPG","contentType":"image/jpeg"}},"altText":"An image of the outline of a book or journal with grey arrows around it. This is on a white circle, which is against a blue background.","slug":"journals-publishing","introTitle":"Journals publishing ","introText":"We pride ourselves on offering a truly collaborative publishing service which has journal development at its core. The range of services we provide includes:","contentBlocks":[],"areas":["publishing-partners"]}},{"name":"staticPage","value":{"pageTitle":"Customer support","pageBlurb":"Our Customer Services teams have units dedicated to library support, handling print fulfilment and claims, as well as online training and access","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, with stars and galaxies. The dark outline of the Cambridge shield is superimposed over the top. ","slug":"customer-support","introTitle":"Customer support","introText":"Our Customer Services teams have units dedicated to library support, handling print fulfilment and claims, as well as online training and access.","contentBlocks":[],"areas":["publishing-partners"]}},{"name":"staticPage","value":{"pageTitle":"Membership Services","pageBlurb":"We provide a range of services to our society partners, including access to journal content, discounts on Cambridge University Press publications and customer service support.","linkImage":{"title":"membership services","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/74cDq9dHVmVPJDQwvqN9ed/00f3dfbab0c2b1f79dea2c40dabb47e6/membership_services.JPG","details":{"size":8177,"image":{"width":169,"height":156}},"fileName":"membership services.JPG","contentType":"image/jpeg"}},"altText":"Line image of a person in blue on a white circle background, against a blue square. Around the person is a curved circular arrow. 
","slug":"membership-services","introTitle":"Membership Services","introText":"We provide a range of services to the members of our society partners, including access to journal content (current and archival), discounts on Cambridge University Press publications and customer service support to ensure that any queries or problems are resolved quickly.","contentBlocks":[],"areas":["publishing-partners"]}},{"name":"staticPage","value":{"pageTitle":"Our Team","pageBlurb":"If you would like to discuss a potential journal publishing collaboration, please contact us to discuss your requirements or ideas.","linkImage":{"title":"Our team","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/3DbyRtmwHXuXmdpmkQTTd4/fb83164a4b8328415057478c6a81fde7/Our_team.JPG","details":{"size":8368,"image":{"width":184,"height":157}},"fileName":"Our team.JPG","contentType":"image/jpeg"}},"altText":"Line drawing of three people. One is shaded grey and the other two are without fill on a white circle. The circle is against a blue background.","slug":"our-team","introTitle":"Our Team","introText":"If you would like to discuss a potential journal publishing collaboration, please contact us to discuss your requirements or ideas.","contentBlocks":[],"areas":["publishing-partners"]}}],"slug":"publishing-partners"}}]}},{"name":"navigationBarCategory","value":{"title":"Open research","navigationBarArea":[{"name":"navigationBarArea","value":{"title":"Open access policies","hubPage":{"name":"staticPage","value":{"pageTitle":"Open access policies","pageBlurb":"Open access (OA) has become an important way to make research findings freely available for anyone to access and view. Open access serves authors and the wider community by publishing high-quality, peer-reviewed OA content.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"open-access-policies","introTitle":"Open access policies","introText":"Open access (OA) has become an important way to make research findings freely available for anyone to access and view. Open access serves authors and the wider community by publishing high-quality, peer-reviewed OA content. We support and promote all forms of OA that are financially sustainable. Our introduction to open access page provides some essential information about the types of OA we offer.","contentBlocks":[],"areas":["open-research-policies"]}},"column1Heading":"Open access policies","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open research","pageBlurb":"The open research movement seeks to maximise the impact and benefits of research by prioritising barrier-free access to research findings, data and methodologies.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"open-research","introTitle":"Open research","introText":"The open research movement seeks to maximise the impact and benefits of research by prioritising barrier-free access to research findings, data and methodologies. Open research reflects a fundamental belief that the pursuit of knowledge benefits directly from collaboration, transparency, rapid dissemination and accessibility.","contentBlocks":[],"areas":["open-access-publishing","open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Open access policies","pageBlurb":"Open access (OA) has become an important way to make research findings freely available for anyone to access and view. Open access serves authors and the wider community by publishing high-quality, peer-reviewed OA content.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"open-access-policies","introTitle":"Open access policies","introText":"Open access (OA) has become an important way to make research findings freely available for anyone to access and view. Open access serves authors and the wider community by publishing high-quality, peer-reviewed OA content. We support and promote all forms of OA that are financially sustainable. 
Our introduction to open access page provides some essential information about the types of OA we offer.","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Cambridge University Press and Plan S","pageBlurb":"Our goal to transform the journals we publish to open research depends on a continued, widespread move to funding for Open Access (OA) around the world.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"cambridge-university-press-and-plan-s","introTitle":"Cambridge University Press and Plan S","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Text and data mining","pageBlurb":"We believe that text and data mining (TDM) is an important and powerful research tool with incredible potential. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"text-and-data-mining","introTitle":"Text and data mining","introText":"We believe that text and data mining (TDM) is an important and powerful research tool with incredible potential. ","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Preprint policy","pageBlurb":"Discover full details of our Preprint policy at Cambridge University Press ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"preprint-policy","introTitle":"Preprint policy at Cambridge University Press","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Social sharing","pageBlurb":"Content sharing is a natural and vital part of research. It helps to disseminate and raise awareness about new findings and to stimulate discussion and further progress.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top. ","slug":"social-sharing","introTitle":"Social Sharing","introText":"Many authors and readers share journal and book content with others. Some of the most common ways that content is shared are:","contentBlocks":[],"areas":["open-research-policies"]}}],"column2Heading":"Journals","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open access journals","pageBlurb":"We have a growing list of journals that publish Gold Open Access articles under Creative Commons licences.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"open-access-journals","introTitle":"Open access journals","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Gold open access journals","pageBlurb":"Many of our journals publish articles as Gold Open Access, under Creative commons (CC) licences, enabling readers to freely access and re-distribute their articles.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"gold-open-access-journals","introTitle":"Gold Open Access in journals","introText":"Many of our journals publish articles as Gold Open Access, under Creative commons (CC) licences, enabling readers to freely access and re-distribute their articles. In some journals Gold OA is an option offered to authors, while in other journals, some or all article types are always published Gold OA.","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Transformative journals","pageBlurb":"The journals below have committed to transition to open research and meet the transformative journals requirements of Plan S. Download our 2021 and 2022 TJ progress reports to see each journal's progression to full OA.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","slug":"transformative-journals","introTitle":"Transformative journals","introText":"The journals below have committed to transition to open research and meet the transformative journals requirements of Plan S. Download our 2021 and 2022 TJ progress reports to see each journal's progression to full OA.","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Green Open Access policy for journals","pageBlurb":"Discover details of how our Green Open Access policy permits authors of journal articles published by Cambridge to share versions of their work online.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"green-open-access-policy-for-journals","introTitle":"Green Open Access policy for journals","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Transparent pricing policy for journals","pageBlurb":"We aim to price our journals fairly and transparently. In particular, our subscription prices should reflect the amount of subscription content in a journal.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"transparent-pricing-policy-for-journals","introTitle":"Transparent pricing policy for journals","introText":"We aim to price our journals fairly and transparently. In particular, our subscription prices should reflect the amount of subscription content in a journal.","contentBlocks":[],"areas":["open-research-policies"]}}],"column3Heading":"Books and Elements","column3StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open access books","pageBlurb":"Open access (OA) is emerging as a new model for book publishing. We support OA books in line with our mission to disseminate knowledge at the highest international levels of excellence.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"open-access-books","introTitle":"Open access books","introText":"Open access (OA) is emerging as a new model for book publishing. 
We support OA books in line with our commitment to exploring publishing models that are financially sustainable, scalable and in accordance with our mission to disseminate knowledge at the highest international levels of excellence.","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Gold open access books","pageBlurb":"We offer authors the option of publishing their work as open access. Discover more information about our gold open access books on our dedicated information page. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"gold-open-access-books","introTitle":"Gold open access books","introText":"We offer authors the option of publishing their work as open access to allow them to make their works freely available online without compromising any aspect of the publishing process. Typically, we offer this option only for monographs, which are books usually written by a single author to convey the results of their research and analysis in a particular field of study.","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Green Open Access policy for books","pageBlurb":"Discover details of how our Green Open Access policy permits authors of books published by Cambridge to share versions of their work online.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"green-open-access-policy-for-books","introTitle":"Green Open Access policy for books","contentBlocks":[],"areas":["open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Open access Elements","pageBlurb":"Discover information about how to publish an Element Gold Open Access, as well as details of our Green Open Access policy for Elements.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"open-access-elements","introTitle":"Open access Elements","introText":"We offer authors the option of publishing their Elements as open access to allow them to make their works freely available online without compromising any aspect of the publishing process.","contentBlocks":[],"areas":["open-research-policies"]}}],"slug":"open-research-policies"}},{"name":"navigationBarArea","value":{"title":"Open access publishing","hubPage":{"name":"staticPage","value":{"pageTitle":"What is open access?","pageBlurb":"Open access (OA) is an important way to make research findings freely available for anyone to access and view.","linkImage":{"title":"What is open access","description":"Dark yellow vertical stripes fill a shield shape, representative of the Cambridge University Press logo, on a light yellow background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7sEQ4K9gF64X6oCYQm5gHg/74f9d5bc0f37e8f4ffe68bd177a5118c/What_is_open_access.png","details":{"size":4791,"image":{"width":450,"height":270}},"fileName":"What is open access.png","contentType":"image/png"}},"altText":"Dark yellow vertical stripes fill a shield shape, representative of the Cambridge University Press logo, on a light yellow background.","slug":"open-access","introTitle":"What is open access?","introText":"Open access (OA) makes research findings freely available for anyone to access and view. Authors will benefit from the open availability of their research to others, leading to an increase in the visibility and usage of their work. Open access allows authors to comply with the Gold and Green OA requirements of major funders.","contentBlocks":[],"areas":["open-access-publishing"]}},"banner":{"title":"1","description":"a mountain range in mist. 
a red shield is superimposed on the right.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/tg5ltxgzVAAUpHiLcO8IM/8799f338869f552c0974163f621af29a/1.png","details":{"size":2466105,"image":{"width":2560,"height":1440}},"fileName":"1.png","contentType":"image/png"}},"column1Heading":"About open access","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open research","pageBlurb":"The open research movement seeks to maximise the impact and benefits of research by prioritising barrier-free access to research findings, data and methodologies.","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"open-research","introTitle":"Open research","introText":"The open research movement seeks to maximise the impact and benefits of research by prioritising barrier-free access to research findings, data and methodologies. Open research reflects a fundamental belief that the pursuit of knowledge benefits directly from collaboration, transparency, rapid dissemination and accessibility.","contentBlocks":[],"areas":["open-access-publishing","open-research-policies"]}},{"name":"staticPage","value":{"pageTitle":"Open Access Week","pageBlurb":"Open Access Week 2024 | Explore how we're working to build an open equitable future for all authors. 
We believe in the quality of your research rather than your ability to pay; so explore our routes to fund your open access publication.","linkImage":{"title":"Open Access Week","description":"Cambridge Blue vertical stripes fill a shield shape, representative of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/116LrrCNfUgNVzHbAtJ8Xj/b8f072a1ffc9713d156e6c96c91ef5d4/OA_Week.png","details":{"size":4883,"image":{"width":450,"height":270}},"fileName":"OA Week.png","contentType":"image/png"}},"altText":"Cambridge Blue vertical stripes fill a shield shape, representative of the Cambridge University Press logo.","slug":"open-access-week","introTitle":"Open Access Week","introText":"Welcome to Open Access Week 2024! We're working to build an open, equitable future for all authors around the world. Find out more about how we're supporting authors from low- and middle-income countries with our award-winning Cambridge Open Equity Initiative, our open access book-funding programme, Flip it Open, and more. 
","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"What is open access?","pageBlurb":"Open access (OA) is an important way to make research findings freely available for anyone to access and view.","linkImage":{"title":"What is open access","description":"Dark yellow vertical stripes fill a shield shape, representative of the Cambridge University Press logo, on a light yellow background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7sEQ4K9gF64X6oCYQm5gHg/74f9d5bc0f37e8f4ffe68bd177a5118c/What_is_open_access.png","details":{"size":4791,"image":{"width":450,"height":270}},"fileName":"What is open access.png","contentType":"image/png"}},"altText":"Dark yellow vertical stripes fill a shield shape, representative of the Cambridge University Press logo, on a light yellow background.","slug":"open-access","introTitle":"What is open access?","introText":"Open access (OA) makes research findings freely available for anyone to access and view. Authors will benefit from the open availability of their research to others, leading to an increase in the visibility and usage of their work. Open access allows authors to comply with the Gold and Green OA requirements of major funders.","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Open access glossary","pageBlurb":"Open access has many unique terms, acronyms and additional information. 
Read our useful glossary to discover more.","linkImage":{"title":"Open access glossary","description":"Bright pink dot pattern fill a shield shape, representative of the Cambridge University Press logo, on a dark pink background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7cLxXpMkrUCEEHVu2Brt1b/69ca9bc044cd4698fce66e6ca8224fd7/OA_glossary.png","details":{"size":58518,"image":{"width":450,"height":270}},"fileName":"OA glossary.png","contentType":"image/png"}},"altText":"Bright pink dot pattern fill a shield shape, representative of the Cambridge University Press logo, on a dark pink background.","slug":"open-access-glossary","introTitle":"Open access glossary","introText":"Open access has many unique terms, acronyms and additional information. Read our useful glossary to discover more.","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Open access myths","pageBlurb":"Open access (OA) is an important way to make high-quality, peer reviewed content freely available for anyone to access and view. ","linkImage":{"title":"Open access myths","description":"Bright orange dot pattern fills a shield shape, representative of the Cambridge University Press logo, on a dark orange background","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/6NIEmqNMzKKHvOHi9cLDUR/157e57a4457dce626d4c3469f15caff6/OA_Myths.png","details":{"size":50542,"image":{"width":450,"height":270}},"fileName":"OA Myths.png","contentType":"image/png"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"open-access-myths","introTitle":"Open access myths","introText":"Open access is an important way to make high-quality, peer reviewed content freely available for anyone to access and view. 
","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Hybrid Open Access FAQs","pageBlurb":"Discover move about what hybrid open access is and what's possible in our frequently asked questions","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"hybrid-open-access-faqs","introTitle":"Hybrid open access - Frequently asked questions","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"url","value":{"title":"Eligibility checker","url":"/core/eligibility-checker","ariaLabel":"Eligibility checker"}}],"column2Heading":"Open access resources","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open access resources","pageBlurb":"Find Open Access resources for authors, including; information on creative commons licences, funder mandates and policies and much more. 
","linkImage":{"title":"Open Access - Resource for authors","description":"Open Access - Resource for authors image","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/nZUZJsiSdEQ92gahE9HVA/5ed0042cab7f26c29ce64460ccbbb273/Open-Access-button-v2.png","details":{"size":10991,"image":{"width":500,"height":300}},"fileName":"Open-Access-button-v2.png","contentType":"image/png"}},"altText":"A bright orange background, with light orange stripes has superimposed text in a white font that reads Open Access, resources for authors ","slug":"open-access-resources","introTitle":"Open access resources","introText":"Open access (OA) has become an important way to make research findings freely available for anyone to access and view. Open access serves authors and the wider community by publishing high-quality, peer-reviewed OA content. We support and promote all forms of OA that are financially sustainable. Our open access resources page provides some essential information about the types of OA we offer.","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Benefits of open access","pageBlurb":"Discover the benefits of open access on our dedicated page which covers; Discoverability and dissemination, Educational and other re-use and Public access and engagement.","linkImage":{"title":"Benefits of open access","description":"Pale yellow spot pattern fills a shield shape, representative of the Cambridge University Press logo, on a dark yellow background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/RzyZtGBB0v0qAeZdQG2lA/b986a003757ddb9089555b2388542208/benefits_of_open_access.png","details":{"size":53285,"image":{"width":450,"height":270}},"fileName":"benefits of open access.png","contentType":"image/png"}},"altText":"Pale yellow spot pattern fills a shield shape, representative of the Cambridge University Press logo, on a dark yellow background.","slug":"benefits-of-open-access","introTitle":"Benefits of open 
access","introText":"Publishing open access helps to advance discovery by allowing anyone, anywhere in the world with internet access to find, access, and benefit from your research. Open access plays an important part in allowing us to fulfil our mission of furthering the advancement of learning, knowledge and research worldwide.","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Creative commons licenses","pageBlurb":"Creative Commons (CC) licences play an important role in facilitating Gold Open Access publishing. They provide a legal framework for giving users the ability to freely view, download and distribute content.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"creative-commons-licenses","introTitle":"Creative Commons (CC) licences","introText":"Creative Commons (CC) licences play an important role in facilitating Gold Open Access publishing. They provide a legal framework for giving users the ability to freely view, download and distribute content.","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Funder policies and mandates","pageBlurb":"Many funders around the world are introducing policies and mandates concerning public access to the research they fund. 
The policies vary and researchers should check the terms and conditions of their research grants.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"funder-policies-and-mandates","introTitle":"Funder policies and mandates","introText":"Many funders around the world are introducing policies and mandates concerning public access to the research they fund. The policies vary and researchers should check the terms and conditions of their research grants in order to be aware of the particular conditions they must abide by. ","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Article type definitions","pageBlurb":"Discover which article types are covered in transformative agreements via our dedicated article type definitions page","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"article-type-definitions","introTitle":"Article type definitions","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Convert your article to Open Access","pageBlurb":"If you would like to publish your article to Open Access, we ask that you select a creative commons licence. The CC licence you choose will determine how readers can use your content. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"convert-your-article-to-open-access","introTitle":"Publishing OA under a Read and Publish agreement","contentBlocks":[],"areas":["open-access-publishing"]}},{"name":"staticPage","value":{"pageTitle":"Open access video resources","pageBlurb":"Comprise of Open Access (OA) Advantage: Webinar, benefits, myths, flipping journal to OA, publishing OA with Cambridge","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. 
The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"Comprise of Open Access (OA) Advantage: Webinar, benefits, myths, flipping journal to OA, publishing OA with Cambridge","slug":"open-access-video-resources","introTitle":"Open Access Video Resources","contentBlocks":[],"areas":["open-access-publishing"]}}],"slug":"open-access-publishing"}},{"name":"navigationBarArea","value":{"title":"Open research initiatives","column1Heading":"Research transparency","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Transparency and openness","pageBlurb":"Transparency and openness are core values of academic research and are essential if new observations and discoveries are to fully contribute to advances in global knowledge. ","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","slug":"transparency-and-openness","introTitle":"Transparency and openness","introText":"Transparency and openness are core values of academic research and are essential if new observations and discoveries are to fully contribute to advances in global knowledge. 
Research articles serve their readers best when they provide sufficient information for new assertions and findings to be properly evaluated and built upon.","contentBlocks":[],"areas":["open-research-initiatives"]}},{"name":"staticPage","value":{"pageTitle":"Open Practice Badges","pageBlurb":"Open Practice Badges are incentives for researchers to share data, materials, or to preregister their work. They are designed to be displayed on published articles to show that authors have engaged in these open practices.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"open-practice-badges","introTitle":"Open Practice Badges","contentBlocks":[],"areas":["open-research-initiatives"]}},{"name":"staticPage","value":{"pageTitle":"OA organisations, initiatives & directories","pageBlurb":"Discover more information on the Open Access organisations, initiatives and directories we are a member of.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. 
","slug":"oa-organisations-initiatives-and-directories","introTitle":"OA organisations, initiatives and directories","introText":"Cambridge University Press is a member of a number of organisations, initiatives and directories which we have provided information on below;","contentBlocks":[],"areas":["open-research-initiatives"]}},{"name":"staticPage","value":{"pageTitle":"Registered Reports","pageBlurb":"Registered Reports are a publishing format developed by the Center for Open Science to incentivise and reward good research practices and study design. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"registered-reports","introTitle":"Registered Reports","contentBlocks":[],"areas":["open-research-initiatives"]}},{"name":"staticPage","value":{"pageTitle":"Annotation for Transparent Inquiry (ATI)","pageBlurb":"Annotation for Transparent Inquiry (ATI) is a new tool designed to facilitate transparency in qualitative and mixed-methods research. ","linkImage":{"title":"frontlist","description":"A prism reflects a spectrum of light over a white shield on a purple background. 
The shield represents the shape of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1yzIb3CIxMfQ2CTz8eswKt/8b72cfb68d75445b52d567fbe3573eb7/frontlist.JPG","details":{"size":20050,"image":{"width":334,"height":341}},"fileName":"frontlist.JPG","contentType":"image/jpeg"}},"altText":"A prism reflects a spectrum of light over a white shield on a purple background. The shield represents the shape of the Cambridge University Press logo.","slug":"annotation-for-transparent-inquiry-ati","introTitle":"Annotation for Transparent Inquiry (ATI)","introText":"Annotation for Transparent Inquiry (ATI) is a new tool designed to facilitate transparency in qualitative and mixed-methods research. ","contentBlocks":[],"areas":["open-research-initiatives"]}}],"column2Heading":"Journal flips","column2StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Open access journal flips","pageBlurb":"We are transforming the vast majority of research publishing in our journals to open access by 2025. 41 journals have flipped to open access for 2024 as part of this transformation, and we are pleased to announce a further 79 flips for 2025.","linkImage":{"title":"openaccess","description":"Open access logo - a stylised open orange padlock. The logo is displayed on a grey striped background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/12DeiUx4ALihi8EgNJ6WB1/e1e67c2e7082bd23f1a676d4186b04b6/openaccess.JPG","details":{"size":15431,"image":{"width":432,"height":286}},"fileName":"openaccess.JPG","contentType":"image/jpeg"}},"altText":"Open access logo - a stylised open orange padlock. The logo is displayed on a grey striped background.","slug":"open-access-journal-flips","introTitle":"Open access journal flips","introText":"We are transforming the vast majority of research publishing in our journals to open access by 2025. 
41 journals have flipped to open access for 2024 as part of this transformation, and we are pleased to announce a further 79 flips for 2025.","contentBlocks":[],"areas":["open-research-initiatives"]}},{"name":"staticPage","value":{"pageTitle":"OA Journal Flip FAQs","pageBlurb":"Find answers to questions about open access journal flips for authors, readers, librarians, society members, editorial board members and publishing partners.","linkImage":{"title":"CoreHomepage_PH_edit","description":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/4PiPfW4LdMHTslTjTN19Di/d4ee372bf3c7e8ec90c169960571ceff/CoreHomepage_PH_edit.JPG","details":{"size":73633,"image":{"width":971,"height":551}},"fileName":"CoreHomepage_PH_edit.JPG","contentType":"image/jpeg"}},"altText":"An image of the night sky, showing stars and galaxies. The dark outline of a shield has been superimposed over the top. ","slug":"oa-journal-flip-faqs","introTitle":"OA Journal Flip FAQs","introText":"Find answers to questions about open access journal flips for authors, readers, librarians, society members, editorial board members and publishing partners.","contentBlocks":[],"areas":["open-research-initiatives"]}}],"column3Heading":"Flip it Open","column3StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Flip it Open","pageBlurb":"Flip it Open is our new programme which aims to fund the open access publication of titles through typical purchasing habits. 
Once titles meet a set amount of revenue, we have committed to make them freely available as open access books.","linkImage":{"title":"flip it open","description":"Image of a door, slightly ajar, with large pink and green balls bouncing through, against a grey background wall.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1Iumh0y51Li1h1czm7xT3s/ffb2baaf34ed6fd6ec5ecea8721a2e17/flip_it_open.JPG","details":{"size":9674,"image":{"width":354,"height":199}},"fileName":"flip it open.JPG","contentType":"image/jpeg"}},"altText":"Image of a door, slightly ajar, with large pink and green balls bouncing through, against a grey background wall.","slug":"flip-it-open","introTitle":"Flip it Open","introText":"Flip it Open is our new programme which aims to fund the open access publication of 100 titles through typical purchasing habits. Once titles meet a set amount of revenue, we have committed to make them freely available as open access books here on Cambridge Core and also as an affordable paperback. Just another way we're building an open future.","contentBlocks":[],"areas":["open-research-initiatives"]}},{"name":"staticPage","value":{"pageTitle":"Flip it Open FAQs","pageBlurb":"Flip it Open is our new programme which aims to fund the open access publication of titles through typical purchasing habits. 
Once titles meet a set amount of revenue, we have committed to make them freely available as open access books.","linkImage":{"title":"flip it open","description":"Image of a door, slightly ajar, with large pink and green balls bouncing through, against a grey background wall.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1Iumh0y51Li1h1czm7xT3s/ffb2baaf34ed6fd6ec5ecea8721a2e17/flip_it_open.JPG","details":{"size":9674,"image":{"width":354,"height":199}},"fileName":"flip it open.JPG","contentType":"image/jpeg"}},"altText":"Image of a door, slightly ajar, with large pink and green balls bouncing through, against a grey background wall.","slug":"flip-it-open-faqs","introTitle":"Flip it Open FAQs","introText":"Flip it Open is our new programme which aims to fund the open access publication of 100 titles through typical purchasing habits. Once titles meet a set amount of revenue, we have committed to make them freely available as open access books here on Cambridge Core and also as an affordable paperback. 
Just another way we're building an open future.","contentBlocks":[],"areas":["open-research-initiatives"]}}],"slug":"open-research-initiatives"}},{"name":"navigationBarArea","value":{"title":"Open access funding","hubPage":{"name":"staticPage","value":{"pageTitle":"Funding open access publication","pageBlurb":"We believe that journals must publish articles based on the quality of the work rather than an author's ability to pay.","linkImage":{"title":"Funding open access publication","description":"Dark orange dot pattern fills a shield shape, representative of the Cambridge University Press logo, on a bright orange background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7te0Mjpty44biAUabJ5RPF/d6e37cd51d396f6ec99817b89b00b235/Funding_OA.png","details":{"size":53374,"image":{"width":450,"height":270}},"fileName":"Funding OA.png","contentType":"image/png"}},"altText":"Dark orange dot pattern fills a shield shape, representative of the Cambridge University Press logo, on a bright orange background.","slug":"funding-open-access-publication","introTitle":"Funding open access publication","introText":"We believe that journals must publish articles based on the quality of the work rather than an author's ability to pay. 
The editorial process – including the decision of whether or not to accept an article for publication – should be independent of the author’s decision to publish Gold Open Access (OA), in cases where this is optional, as well as being independent of how the publication costs for a Gold OA article are funded.","contentBlocks":[],"areas":["open-access-funding"]}},"banner":{"title":"3","description":"An image of prairie land and dusky skies with a green shield superimposed to the left.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7lbnCGEEZDq5LJSmPTzfqB/ff8d397038089070b71384ed0ce45028/3.jpg","details":{"size":313671,"image":{"width":2560,"height":1440}},"fileName":"3.jpg","contentType":"image/jpeg"}},"column1Heading":"Open access funding","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Funding open access publication","pageBlurb":"We believe that journals must publish articles based on the quality of the work rather than an author's ability to pay.","linkImage":{"title":"Funding open access publication","description":"Dark orange dot pattern fills a shield shape, representative of the Cambridge University Press logo, on a bright orange background.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/7te0Mjpty44biAUabJ5RPF/d6e37cd51d396f6ec99817b89b00b235/Funding_OA.png","details":{"size":53374,"image":{"width":450,"height":270}},"fileName":"Funding OA.png","contentType":"image/png"}},"altText":"Dark orange dot pattern fills a shield shape, representative of the Cambridge University Press logo, on a bright orange background.","slug":"funding-open-access-publication","introTitle":"Funding open access publication","introText":"We believe that journals must publish articles based on the quality of the work rather than an author's ability to pay. 
The editorial process – including the decision of whether or not to accept an article for publication – should be independent of the author’s decision to publish Gold Open Access (OA), in cases where this is optional, as well as being independent of how the publication costs for a Gold OA article are funded.","contentBlocks":[],"areas":["open-access-funding"]}},{"name":"staticPage","value":{"pageTitle":"Cambridge Open Equity Initiative","pageBlurb":"The Cambridge Open Equity Initiative is a new pilot designed to support authors in low- and middle-income countries who wish to publish their research open access in our journals but do not have access to funding.","linkImage":{"title":"Cambridge Open Equity Initiative","description":"Blue vertical stripes fill a shield shape, representative of the Cambridge University Press logo.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/6JjuLI91OqL5yWfpDCBabw/57656fcab1c004e52e2ac5221bf45ae3/COEI.png","details":{"size":7741,"image":{"width":450,"height":270}},"fileName":"COEI.png","contentType":"image/png"}},"altText":"Blue vertical stripes fill a shield shape, representative of the Cambridge University Press logo.","slug":"cambridge-open-equity-initiative","introTitle":"Cambridge Open Equity Initiative","introText":"The Cambridge Open Equity Initiative is designed to support authors in low- and middle-income countries who wish to publish their research open access in our journals but do not have access to funding.","contentBlocks":[],"areas":["open-access-funding"]}}],"slug":"open-access-funding"}},{"name":"navigationBarArea","value":{"title":" Cambridge Open Engage","hubPage":{"name":"staticPage","value":{"pageTitle":"Cambridge Open Engage","pageBlurb":"Cambridge Open Engage is the early research platform from Cambridge University Press. 
The site offers researchers a new space to rapidly disseminate early and open research, including preprints, posters, presentations and conference papers.","linkImage":{"title":"COE Banner for Core","description":"COE Banner for Core","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/11sHvd1kZ8Vow8Yew8U8Cb/5f69b2462388856cfdb9984c750c6d3c/COE-banner-for-Core-1200x175.jpg","details":{"size":30597,"image":{"width":1600,"height":233}},"fileName":"COE-banner-for-Core-1200x175.jpg","contentType":"image/jpeg"}},"altText":"Cambridge Open Engage banner with its logo on the left side of the text and with a blue-green background color.","slug":"cambridge-open-engage","introTitle":"Cambridge Open Engage","contentBlocks":[],"areas":["cambridge-open-engage"]}},"column1Heading":" Cambridge Open Engage","column1StaticPagesOrUrls":[{"name":"staticPage","value":{"pageTitle":"Cambridge Open Engage","pageBlurb":"Cambridge Open Engage is the early research platform from Cambridge University Press. The site offers researchers a new space to rapidly disseminate early and open research, including preprints, posters, presentations and conference papers.","linkImage":{"title":"COE Banner for Core","description":"COE Banner for Core","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/11sHvd1kZ8Vow8Yew8U8Cb/5f69b2462388856cfdb9984c750c6d3c/COE-banner-for-Core-1200x175.jpg","details":{"size":30597,"image":{"width":1600,"height":233}},"fileName":"COE-banner-for-Core-1200x175.jpg","contentType":"image/jpeg"}},"altText":"Cambridge Open Engage banner with its logo on the left side of the text and with a blue-green background color.","slug":"cambridge-open-engage","introTitle":"Cambridge Open Engage","contentBlocks":[],"areas":["cambridge-open-engage"]}},{"name":"staticPage","value":{"pageTitle":"Partner With Us","pageBlurb":"We work with partners - such as learned societies, funders and departments or research centres within research institutions - in a range of 
ways.","linkImage":{"title":"Partner With Us","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/54Be88OUjKkNI6xbxmes8I/1196f9840ef75b6e7cf6aec599616505/partner-with-us.jpg","details":{"size":50015,"image":{"width":1250,"height":300}},"fileName":"partner-with-us.jpg","contentType":"image/jpeg"}},"altText":"Cambridge Open Engage Partner with Us banner with a yellow background for the Cambridge Core website.","slug":"engage-partner-with-us","introTitle":"Partner With Us","contentBlocks":[],"areas":["cambridge-open-engage"]}},{"name":"staticPage","value":{"pageTitle":"Branded Hubs","pageBlurb":"Work with us to build your own branded space within Cambridge Open Engage. We offer partners such as learned societies, departments, and many more.","linkImage":{"title":"Branded Hubs","description":"Cambridge Open Engage logo with Branded Hubs banner with yellow background on Cambridge Core website.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1HYQfWBUMmFh5cxXT4ASk3/dd42f22e9034baad9e25666308f03055/Branded_hubs.JPG","details":{"size":13529,"image":{"width":318,"height":192}},"fileName":"Branded hubs.JPG","contentType":"image/jpeg"}},"altText":"Cambridge Open Engage logo with Branded Hubs banner with yellow background on Cambridge Core website.","slug":"engage-branded-hubs","introTitle":"Branded Hubs","contentBlocks":[],"areas":["cambridge-open-engage"]}},{"name":"staticPage","value":{"pageTitle":"Event Workspaces","pageBlurb":"We offer branded event spaces where you can share outputs from events, offer commenting features allowing attendees to ask questions, and much more.","linkImage":{"title":"Event Workspaces","description":"Cambridge Open Engage with logo Event Workspaces banner with a yellow background on Cambridge Core website.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/1JNYmsm8HkoalS94741ud2/0299af5939154363dff989acb51ae3b3/Event_workspaces.JPG","details":{"size":13881,"image":{"width":320,"height":192}},"fileName":"Event 
workspaces.JPG","contentType":"image/jpeg"}},"altText":"Cambridge Open Engage with logo Event Workspaces banner with a yellow background on Cambridge Core website.","slug":"engage-event-workspaces","introTitle":"Event Workspaces","contentBlocks":[],"areas":["cambridge-open-engage"]}},{"name":"staticPage","value":{"pageTitle":"Partner Resources","pageBlurb":"Spread the word to your researchers by sending them to Cambridge Open Engage, or contact us if you'd like to discuss tailored messaging for your community.","linkImage":{"title":"Partner Resources","description":"Cambridge Open Engage with logo Partner Resources banner with a yellow background on Cambridge Core website.","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/2NdKUR1NBikcoqYsjp1ePQ/0838ba4b5725badec595663d5ab08147/Partner_resources.JPG","details":{"size":13317,"image":{"width":316,"height":190}},"fileName":"Partner resources.JPG","contentType":"image/jpeg"}},"altText":"Cambridge Open Engage with logo Partner Resources banner with a yellow background on Cambridge Core website.","slug":"engage-partner-resources","introTitle":"Partner Resources","introText":"A selection of resources to aid partners with informing their members, staff, authors and editors about the benefits and functionality of Cambridge Open Engage.","contentBlocks":[],"areas":["cambridge-open-engage"]}},{"name":"staticPage","value":{"pageTitle":"APSA Preprints","pageBlurb":"APSA Preprints is a free-to-access pre-publication platform dedicated to early research outputs in political science, and developed through the collaboration of the American Political Science Association and Cambridge University Press.","linkImage":{"title":"APSA Preprints Banner","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/37QmpvOaJxxp9ksVaulObU/6ea66e49ff4f3d920eb5c43db27c02ce/APSA-Preprints.jpg","details":{"size":40928,"image":{"width":2000,"height":440}},"fileName":"APSA-Preprints.jpg","contentType":"image/jpeg"}},"altText":"APSA 
Preprints American Political Science Association logo banner with blue and green text color at the right and a cut circle at the left.","slug":"engage-apsa-preprints","introTitle":"APSA Preprints","contentBlocks":[],"areas":["cambridge-open-engage"]}},{"name":"staticPage","value":{"pageTitle":"APSA Preprints FAQs","pageBlurb":"If you have any questions about APSA Preprints that are not addressed by the FAQs or User Guide, you can contact preprints@apsanet.org.","linkImage":{"title":"APSA Preprints Banner","description":"","file":{"url":"//images.ctfassets.net/ulsp6w1o06p0/37QmpvOaJxxp9ksVaulObU/6ea66e49ff4f3d920eb5c43db27c02ce/APSA-Preprints.jpg","details":{"size":40928,"image":{"width":2000,"height":440}},"fileName":"APSA-Preprints.jpg","contentType":"image/jpeg"}},"altText":"APSA Preprints American Political Science Association logo banner with blue and green text color at the right and a cut circle at the left.","slug":"engage-apsa-preprints-faqs","introTitle":"APSA Preprints FAQs","contentBlocks":[],"areas":["cambridge-open-engage"]}}],"slug":"cambridge-open-engage"}}]}}],"youtube":{"name":"url","value":{"title":"Youtube","url":"https://www.youtube.com/playlist?list=PLTK8KRW19hUVucVRHbIx73oLKUro8HXt0","ariaLabel":"Visit Cambridge University Press Youtube account. Opens in a new tab."}},"xTwitter":{"name":"url","value":{"title":"xTwitter","url":"https://twitter.com/CambridgeCore","ariaLabel":"Visit Cambridge University Press X account. Opens in a new tab."}},"facebook":{"name":"url","value":{"title":"Facebook","url":"https://www.facebook.com/CambridgeCore","ariaLabel":"Visit Cambridge University Press Facebook account. Opens in a new tab."}},"instagram":{"name":"url","value":{"title":"Instagram","url":"https://www.instagram.com/cambridgeuniversitypress/","ariaLabel":"Visit Cambridge University Press Instagram account. 
Opens in a new tab."}},"linkedin":{"name":"url","value":{"title":"Linkedin","url":"https://www.linkedin.com/showcase/11096649","ariaLabel":"Visit Cambridge University Press Linkedin account. Opens in a new tab."}},"accessibility":{"name":"url","value":{"title":"Accessibility","url":"/core/accessibility","ariaLabel":"Cambridge Core accessibility page"}},"contactAndHelp":{"name":"url","value":{"title":"Contact & Help","url":"/core/help/FAQs","ariaLabel":"Cambridge Core contact & help page"}},"legalNotices":{"name":"url","value":{"title":"Legal notices","url":"/core/legal-notices/terms","ariaLabel":"Cambridge Core legal notices page"}}}},"locations":[{"id":"4cceb186-9e0f-4a5c-9cae-5fb7faf7c98e","alpha2Code":"AF","alpha3Code":"AFG","shortName":"Afghanistan","status":"officially-assigned","kkCountryId":"1","cupBranch":"D","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"7a8af489-758b-4a78-832d-dd9e754b5512","alpha2Code":"AX","alpha3Code":"ALA","shortName":"Aland 
Islands","status":"officially-assigned","kkCountryId":"240","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"6ec2aa7c-8144-4413-9a53-1e477c4c836a","alpha2Code":"AL","alpha3Code":"ALB","shortName":"Albania","status":"officially-assigned","kkCountryId":"2","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"522e3e12-6761-4d4d-bcaa-eb24f87aec05","alpha2Code":"DZ","alpha3Code":"DZA","shortName":"Algeria","status":"officially-assigned","kkCountryId":"3","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a01524a2-a472-44b6-9ebe-a61fd156973d","alpha2Code":"AS","alpha3Code":"ASM","shortName":"American Samoa","status":"officially-assigned","kkCountryId":"4","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"8317dd2d-d1f4-4009-ad3c-457ab2ba3a14","alpha2Code":"AD","alpha3Code":"AND","shortName":"Andorra","status":"officially-assigned","kkCountryId":"5","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":true},{"id":"1bf09008-eab9-4805-bdd4-2158a298f1f9","alpha2Code":"AO","alpha3Code":"AGO","shortName":"Angola","status":"officially-assigned","kkCountryId":"6","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"ea735233-adeb-4652-8f09-22420fc62809","alpha2Code":"AI","alpha3Code":"AIA","shortName":"Anguilla","status":"officially-assigned","kkCountryId":"7",
"cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"20d19e9e-af37-4d13-a146-799796d8b6c0","alpha2Code":"AQ","alpha3Code":"ATA","shortName":"Antarctica","status":"officially-assigned","kkCountryId":"8","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"075b9cdb-8bcd-4afa-bea8-38fcb3a91d03","alpha2Code":"AG","alpha3Code":"ATG","shortName":"Antigua and Barbuda","status":"officially-assigned","kkCountryId":"9","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"5e891ca3-69c1-429c-9673-4b0ca191b8f4","alpha2Code":"AR","alpha3Code":"ARG","shortName":"Argentina","status":"officially-assigned","kkCountryId":"10","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"0ee9aa6f-ad1f-4760-95d4-4d233a39b6fe","alpha2Code":"AM","alpha3Code":"ARM","shortName":"Armenia","status":"officially-assigned","kkCountryId":"11","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"c01b6345-36af-4b5e-804b-81e4def241ff","alpha2Code":"AW","alpha3Code":"ABW","shortName":"Aruba","status":"officially-assigned","kkCountryId":"12","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"49d6f1dc-ca68-42f8-9f0f-3906decfe986","alpha2Code":"AU","alpha3Code":"AUS","shortName":"Australia","status":"officially-assigned","kkCountryId":"13","cupBranch":"A","cupRegion":"A","regionCodes
":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"AUD","ecommLabel":"Total (incl. GST)","sensitiveTerritory":false},{"id":"f8c6231d-104b-45f9-b8bd-59d058c26617","alpha2Code":"AT","alpha3Code":"AUT","shortName":"Austria","status":"officially-assigned","kkCountryId":"14","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"e6898057-dce5-4267-9839-6f246dc56655","alpha2Code":"AZ","alpha3Code":"AZE","shortName":"Azerbaijan","status":"officially-assigned","kkCountryId":"15","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"5303e81f-50eb-474d-9de4-642d76633624","alpha2Code":"BS","alpha3Code":"BHS","shortName":"Bahamas","status":"officially-assigned","kkCountryId":"16","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"b39f1489-b2e0-4c8c-af10-6c1a1d1d51c8","alpha2Code":"BH","alpha3Code":"BHR","shortName":"Bahrain","status":"officially-assigned","kkCountryId":"17","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"813ffe7c-1bdc-43b7-a14c-ffe7d7c0da9d","alpha2Code":"BD","alpha3Code":"BGD","shortName":"Bangladesh","status":"officially-assigned","kkCountryId":"18","cupBranch":"D","cupRegion":"D","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"5e8eb00f-958a-440e-b98d-d3c0d7fc98c3","alpha2Code":"BB","alpha3Code":"BRB","shortName":"Barbados","status":"officially-assigned","kkCountryId":"19","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embarg
oedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"3046521d-29d4-40f3-853e-3f401ead4807","alpha2Code":"BY","alpha3Code":"BLR","shortName":"Belarus","status":"officially-assigned","kkCountryId":"20","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"f1e88a22-8adc-4a00-bbb2-a99ec1b6ec39","alpha2Code":"BE","alpha3Code":"BEL","shortName":"Belgium","status":"officially-assigned","kkCountryId":"21","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"1823ccfc-72e0-4af3-bba7-74ef903f39c5","alpha2Code":"BZ","alpha3Code":"BLZ","shortName":"Belize","status":"officially-assigned","kkCountryId":"22","cupBranch":"O","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"05e40998-408c-474d-9d60-bc9d29780ef6","alpha2Code":"BJ","alpha3Code":"BEN","shortName":"Benin","status":"officially-assigned","kkCountryId":"23","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"76d1cd2b-973d-4f95-ae34-60dae74043da","alpha2Code":"BM","alpha3Code":"BMU","shortName":"Bermuda","status":"officially-assigned","kkCountryId":"24","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"d9890163-5116-4417-aff7-73eee8cf767b","alpha2Code":"BT","alpha3Code":"BTN","shortName":"Bhutan","status":"officially-assigned","kkCountryId":"25","cupBranch":"D","cupRegion":"D","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitive
Territory":false},{"id":"2c1cf90a-805b-461b-a568-d5123004356a","alpha2Code":"BO","alpha3Code":"BOL","shortName":"Bolivia","status":"officially-assigned","kkCountryId":"26","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"80dac76f-ee03-48f1-bbad-f2dd3a05b557","alpha2Code":"BA","alpha3Code":"BIH","shortName":"Bosnia and Herzegovina","status":"officially-assigned","kkCountryId":"241","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"83ded62f-d6fa-4269-8389-6bb6d3530551","alpha2Code":"BW","alpha3Code":"BWA","shortName":"Botswana","status":"officially-assigned","kkCountryId":"28","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"1f520814-af4f-4c4f-94ba-9d2f33b1c1f8","alpha2Code":"BV","alpha3Code":"BVT","shortName":"Bouvet Island","status":"officially-assigned","kkCountryId":"29","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"3f620225-dde7-4487-a6a1-bb59b383a939","alpha2Code":"BR","alpha3Code":"BRA","shortName":"Brazil","status":"officially-assigned","kkCountryId":"30","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"218df5a9-9145-46cd-927c-09d58d5f9dd5","alpha2Code":"IO","alpha3Code":"IOT","shortName":"British Indian Ocean 
Territory","status":"officially-assigned","kkCountryId":"31","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"874a057f-d7b0-413d-a102-b900fa63d3c7","alpha2Code":"BN","alpha3Code":"BRN","shortName":"Brunei Darussalam","status":"officially-assigned","kkCountryId":"32","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"f8fcfe8b-de4f-4249-82a9-b25c53f752a7","alpha2Code":"BG","alpha3Code":"BGR","shortName":"Bulgaria","status":"officially-assigned","kkCountryId":"33","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"825c267c-010c-4686-9e3a-8d9a102481d9","alpha2Code":"BF","alpha3Code":"BFA","shortName":"Burkina Faso","status":"officially-assigned","kkCountryId":"34","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"c64375a9-61b0-44d2-94de-43fc25389cfc","alpha2Code":"BI","alpha3Code":"BDI","shortName":"Burundi","status":"officially-assigned","kkCountryId":"35","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"f571cf28-91cb-4c0d-8a4b-debd3047ab42","alpha2Code":"KH","alpha3Code":"KHM","shortName":"Cambodia","status":"officially-assigned","kkCountryId":"36","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"5ee10546-d2a4-40c8-b221-02baac2aceb7","alpha2Code":"CM","alpha3Code":"CMR","shortName":"Cameroon","status":"officially-assigned","kk
CountryId":"37","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"df5651a0-e129-4f1b-b6ad-2f2dc1a61f70","alpha2Code":"CA","alpha3Code":"CAN","shortName":"Canada","status":"officially-assigned","kkCountryId":"38","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"fb011ad1-74ca-4235-a7ba-5003ba82e3de","alpha2Code":"CV","alpha3Code":"CPV","shortName":"Cape Verde","status":"officially-assigned","kkCountryId":"39","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"f38f6d00-c96d-4362-b92f-8eb37b011f40","alpha2Code":"KY","alpha3Code":"CYM","shortName":"Cayman Islands","status":"officially-assigned","kkCountryId":"40","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"c0add3c3-c40b-4cda-b80f-acf3b8e0224e","alpha2Code":"CF","alpha3Code":"CAF","shortName":"Central African Republic","status":"officially-assigned","kkCountryId":"41","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"579966ba-d860-4886-94d0-e67d02f84a3e","alpha2Code":"TD","alpha3Code":"TCD","shortName":"Chad","status":"officially-assigned","kkCountryId":"42","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"b2179410-845b-4891-8e62-968eb7eb1b9e","alpha2Code":"IM","alpha3Code":"IMN","shortName":"Channel Islands, Isle of 
Man","status":"officially-assigned","kkCountryId":"243","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"621336c2-1da9-432a-9c7b-d3d5839ed6ab","alpha2Code":"CL","alpha3Code":"CHL","shortName":"Chile","status":"officially-assigned","kkCountryId":"43","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"296c1d80-767f-4036-912b-5f73422f1a22","alpha2Code":"CN","alpha3Code":"CHN","shortName":"China","status":"officially-assigned","kkCountryId":"44","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":1,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"eb60ae74-39b7-4400-bf55-0d65502a6c73","alpha2Code":"CX","alpha3Code":"CXR","shortName":"Christmas Island","status":"officially-assigned","kkCountryId":"45","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"048a93ca-514c-42cc-b460-cf620e540eb0","alpha2Code":"CC","alpha3Code":"CCK","shortName":"Cocos (Keeling) 
Islands","status":"officially-assigned","kkCountryId":"46","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"93f3e798-c2b9-4240-be44-9e1b60e837cc","alpha2Code":"CO","alpha3Code":"COL","shortName":"Colombia","status":"officially-assigned","kkCountryId":"47","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"9b9db9aa-5411-4d1d-b663-f090006a0f07","alpha2Code":"KM","alpha3Code":"COM","shortName":"Comoros","status":"officially-assigned","kkCountryId":"48","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"893b2388-9613-4a21-b513-7cb3af28ca02","alpha2Code":"CG","alpha3Code":"COG","shortName":"Congo","status":"officially-assigned","kkCountryId":"49","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"1f635c2a-859c-45e7-9c82-6b985d327344","alpha2Code":"CD","alpha3Code":"COD","shortName":"Congo, The Democratic Republic of the","status":"officially-assigned","kkCountryId":"242","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e1b80aee-0209-4a3f-bbd1-eb7eaf5b8d4a","alpha2Code":"CK","alpha3Code":"COK","shortName":"Cook Islands","status":"officially-assigned","kkCountryId":"50","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"801c58d4-4ad9-42a6-867d-71e9aa0fa6fd","alpha2Code":"CR","alpha3Code":"CRI","shortName":"Costa 
Rica","status":"officially-assigned","kkCountryId":"51","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"7b6c730a-0a5a-4404-b790-98d5795ef876","alpha2Code":"CI","alpha3Code":"CIV","shortName":"Cote D'Ivoire","status":"officially-assigned","kkCountryId":"52","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e223963e-bacb-4397-b9d4-a2b6089ed2df","alpha2Code":"HR","alpha3Code":"HRV","shortName":"Croatia","status":"officially-assigned","kkCountryId":"53","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"2b7a9450-97e2-482d-b493-dec2c8d6271a","alpha2Code":"CU","alpha3Code":"CUB","shortName":"Cuba","status":"officially-assigned","kkCountryId":"54","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":1,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"9c1673be-ad0d-4ace-beb1-bc90aa7aaa81","alpha2Code":"CY","alpha3Code":"CYP","shortName":"Cyprus","status":"officially-assigned","kkCountryId":"55","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"a882aa2a-0c36-463c-8bcc-f7a39b4afc30","alpha2Code":"CZ","alpha3Code":"CZE","shortName":"Czech 
Republic","status":"officially-assigned","kkCountryId":"56","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"2c65e5f6-06d7-47f5-96b1-a4de391bd665","alpha2Code":"DK","alpha3Code":"DNK","shortName":"Denmark","status":"officially-assigned","kkCountryId":"57","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"27fc0d3d-d582-408c-aba4-405b9d47561f","alpha2Code":"DJ","alpha3Code":"DJI","shortName":"Djibouti","status":"officially-assigned","kkCountryId":"58","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0ca16a87-f6e1-4d66-b97a-a2b0d0c47a12","alpha2Code":"DM","alpha3Code":"DMA","shortName":"Dominica","status":"officially-assigned","kkCountryId":"59","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"e81463fc-6dca-4065-b460-3788de92c400","alpha2Code":"DO","alpha3Code":"DOM","shortName":"Dominican Republic","status":"officially-assigned","kkCountryId":"60","cupBranch":"Q","cupRegion":"Q","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e36b7e41-87f5-4cf2-b989-e98cf2302c15","alpha2Code":"TP","alpha3Code":"TMP","shortName":"East 
Timor","status":"officially-assigned","kkCountryId":"61","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0a3f7221-8e0b-4544-87aa-39d757fea3b8","alpha2Code":"EC","alpha3Code":"ECU","shortName":"Ecuador","status":"officially-assigned","kkCountryId":"62","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"6f4e6491-3e03-4c7f-9150-7bfa50ee6432","alpha2Code":"EG","alpha3Code":"EGY","shortName":"Egypt","status":"officially-assigned","kkCountryId":"63","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"43351744-5158-4569-a9a8-946c9ac2245a","alpha2Code":"SV","alpha3Code":"SLV","shortName":"El Salvador","status":"officially-assigned","kkCountryId":"64","cupBranch":"O","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"7000fd0d-4dc6-4d1a-90fe-e5353a407af5","alpha2Code":"GQ","alpha3Code":"GNQ","shortName":"Equatorial 
Guinea","status":"officially-assigned","kkCountryId":"65","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"20a43d20-1135-4a18-8c6a-6bcd6f9793b1","alpha2Code":"ER","alpha3Code":"ERI","shortName":"Eritrea","status":"officially-assigned","kkCountryId":"66","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"be3990d4-098b-4294-ae25-5b5b5ad4079c","alpha2Code":"EE","alpha3Code":"EST","shortName":"Estonia","status":"officially-assigned","kkCountryId":"67","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"57314c79-55a8-4792-8365-6a2ba72a104a","alpha2Code":"ET","alpha3Code":"ETH","shortName":"Ethiopia","status":"officially-assigned","kkCountryId":"68","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"fa4b8ced-ff82-4ca9-9452-caad93f772fb","alpha2Code":"FK","alpha3Code":"FLK","shortName":"Falkland Islands (Malvinas)","status":"officially-assigned","kkCountryId":"69","cupBranch":"N","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"45c3a918-4f45-4640-b62e-5a41cb3a7f90","alpha2Code":"FO","alpha3Code":"FRO","shortName":"Faroe 
Islands","status":"officially-assigned","kkCountryId":"70","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a2f4e53e-19ba-4795-83c6-616f537e57b3","alpha2Code":"FJ","alpha3Code":"FJI","shortName":"Fiji","status":"officially-assigned","kkCountryId":"71","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"865d0154-f45c-44f1-93f0-f2ea86ea51fc","alpha2Code":"FI","alpha3Code":"FIN","shortName":"Finland","status":"officially-assigned","kkCountryId":"72","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"05dd7d5d-8a87-43e0-85c7-8c759916e772","alpha2Code":"FR","alpha3Code":"FRA","shortName":"France","status":"officially-assigned","kkCountryId":"73","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"03497ee5-48ee-4cdc-b775-0ade025586d4","alpha2Code":"GF","alpha3Code":"GUF","shortName":"French Guiana","status":"officially-assigned","kkCountryId":"75","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"791e2817-e292-4258-beb9-2a6c94bb5d69","alpha2Code":"PF","alpha3Code":"PYF","shortName":"French Polynesia","status":"officially-assigned","kkCountryId":"76","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"bb9290d9-fb8e-4529-baf9-b27e5528980d","alpha2Code":"TF","alpha3Code":"ATF","shortName":"French Southern 
Territories","status":"officially-assigned","kkCountryId":"77","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"afaa83a3-f2db-4be0-bdca-5453f35f1ae7","alpha2Code":"GA","alpha3Code":"GAB","shortName":"Gabon","status":"officially-assigned","kkCountryId":"78","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"68325bad-f70d-4ffd-b67c-5de5ebdfecbd","alpha2Code":"GM","alpha3Code":"GMB","shortName":"Gambia","status":"officially-assigned","kkCountryId":"79","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a9a68e5f-bb52-481d-bbe6-b652ac54f20d","alpha2Code":"GE","alpha3Code":"GEO","shortName":"Georgia","status":"officially-assigned","kkCountryId":"80","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"94230303-e27a-4c7e-83c0-59f3b65cb72b","alpha2Code":"DE","alpha3Code":"DEU","shortName":"Germany","status":"officially-assigned","kkCountryId":"81","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"40151893-12fb-4cc2-8564-33d43fcdb38b","alpha2Code":"GH","alpha3Code":"GHA","shortName":"Ghana","status":"officially-assigned","kkCountryId":"82","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"fcb90596-ccc1-4c0d-9c21-12786102ca92","alpha2Code":"GI","alpha3Code":"GIB","shortName":"Gibraltar","status":"officially-assigned","kkCountryId":"83","c
upBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"51d2507a-91a0-43cd-824b-a7cbd20bb1f6","alpha2Code":"GR","alpha3Code":"GRC","shortName":"Greece","status":"officially-assigned","kkCountryId":"84","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"30514745-a50b-448e-90d9-c3bf603f9218","alpha2Code":"GL","alpha3Code":"GRL","shortName":"Greenland","status":"officially-assigned","kkCountryId":"85","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"80e039ef-ccd8-4d9c-9651-01a75dbd2b18","alpha2Code":"GD","alpha3Code":"GRD","shortName":"Grenada","status":"officially-assigned","kkCountryId":"86","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"cb2cecc3-84ae-4a09-b0c5-1e2e94274264","alpha2Code":"GP","alpha3Code":"GLP","shortName":"Guadeloupe","status":"officially-assigned","kkCountryId":"87","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"187928f6-cd34-4669-b48a-077f7562c651","alpha2Code":"GU","alpha3Code":"GUM","shortName":"Guam","status":"officially-assigned","kkCountryId":"88","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"3cf99d09-1a9d-4e56-b0ff-a3f886a242dd","alpha2Code":"GT","alpha3Code":"GTM","shortName":"Guatemala","status":"officially-assigned","kkCountryId":"89","cupBranch":"O","cupRegion":"N","regionCodes":[],"embargoedCo
untryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"0c0bd020-ee8f-458b-84ba-c4969b8f3d9e","alpha2Code":"GG","alpha3Code":"GGY","shortName":"Guernsey","status":"officially-assigned","kkCountryId":"256","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"05f3afe1-7c4f-4ca2-962d-0db4aa6b410c","alpha2Code":"GN","alpha3Code":"GIN","shortName":"Guinea","status":"officially-assigned","kkCountryId":"90","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"2f917b45-0301-4eca-bf2c-3644498bb848","alpha2Code":"GW","alpha3Code":"GNB","shortName":"Guinea-bissau","status":"officially-assigned","kkCountryId":"91","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"dcaf4941-7baa-41b4-9b00-3464cd91eff9","alpha2Code":"GY","alpha3Code":"GUY","shortName":"Guyana","status":"officially-assigned","kkCountryId":"92","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"c33a6cf0-d4fc-443d-b9e6-d42f630d1af2","alpha2Code":"HT","alpha3Code":"HTI","shortName":"Haiti","status":"officially-assigned","kkCountryId":"93","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"e6179487-b4ca-4d8f-92ab-a7864b84f85d","alpha2Code":"HM","alpha3Code":"HMD","shortName":"Heard and Mc Donald 
Islands","status":"officially-assigned","kkCountryId":"94","cupBranch":"NULL","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"d11f91c5-5187-4d08-b48d-9077a0768435","alpha2Code":"HN","alpha3Code":"HND","shortName":"Honduras","status":"officially-assigned","kkCountryId":"95","cupBranch":"O","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"b67c3d03-f527-4dbd-a07d-01a82bef1107","alpha2Code":"HK","alpha3Code":"HKG","shortName":"Hong Kong","status":"officially-assigned","kkCountryId":"96","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"47b97900-a473-4bf5-981b-9de200e6b34f","alpha2Code":"HU","alpha3Code":"HUN","shortName":"Hungary","status":"officially-assigned","kkCountryId":"97","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"614cb0b5-e364-44d9-8c58-ff8d88a58156","alpha2Code":"IS","alpha3Code":"ISL","shortName":"Iceland","status":"officially-assigned","kkCountryId":"98","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"bd34cc15-3c6e-40a6-a0d4-e5d564a28536","alpha2Code":"IN","alpha3Code":"IND","shortName":"India","status":"officially-assigned","kkCountryId":"99","cupBranch":"D","cupRegion":"D","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"87f30ea0-a22f-40b0-aeb6-6a35d7f32bf3","alpha2Code":"ID","alpha3Code":"IDN","shortName":"Indonesia","status":"officially-assigned","kkCountryId":"1
00","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"c01d7696-a9ab-4703-9e1a-78e5607d9352","alpha2Code":"IR","alpha3Code":"IRN","shortName":"Iran, Islamic Republic of","status":"officially-assigned","kkCountryId":"244","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":1,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0ceb019d-6fbd-4deb-9f86-024807ca8aa2","alpha2Code":"IQ","alpha3Code":"IRQ","shortName":"Iraq","status":"officially-assigned","kkCountryId":"102","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"84740583-b333-4f4a-8a42-3a211eaa7af3","alpha2Code":"IE","alpha3Code":"IRL","shortName":"Ireland","status":"officially-assigned","kkCountryId":"103","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"979019bc-315b-4619-9a23-9eb96fa30b42","alpha2Code":"IL","alpha3Code":"ISR","shortName":"Israel","status":"officially-assigned","kkCountryId":"104","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a0301abd-eeaf-4e34-b578-328b434dce05","alpha2Code":"IT","alpha3Code":"ITA","shortName":"Italy","status":"officially-assigned","kkCountryId":"105","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"a082a2fa-fece-4cda-978a-af61bb51f297","alpha2Code":"JM","alpha3Code":"JAM","shortName":"Jamaica","status":"officially-assigned","kkCountryId":"106","cupBranch":"C","cupRegion":"C","regionC
odes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e0d7d043-51c6-4e34-8609-0f26b56bbaa3","alpha2Code":"JP","alpha3Code":"JPN","shortName":"Japan","status":"officially-assigned","kkCountryId":"107","cupBranch":"F","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"cf6cbfe5-435d-4f16-b202-b75935b9bfd1","alpha2Code":"JE","alpha3Code":"JEY","shortName":"Jersey","status":"officially-assigned","kkCountryId":"254","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"301a18e0-a4ff-4454-b99a-1f7ddaf960fc","alpha2Code":"JO","alpha3Code":"JOR","shortName":"Jordan","status":"officially-assigned","kkCountryId":"108","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"a2be4056-1959-41fd-b83b-a1a6e6790668","alpha2Code":"KZ","alpha3Code":"KAZ","shortName":"Kazakhstan","status":"officially-assigned","kkCountryId":"109","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"8a09cfd7-9e32-461f-9814-51cf3d205afa","alpha2Code":"KE","alpha3Code":"KEN","shortName":"Kenya","status":"officially-assigned","kkCountryId":"110","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0e97bf1b-5058-4bcf-9e56-e3e59ec097bf","alpha2Code":"KI","alpha3Code":"KIR","shortName":"Kiribati","status":"officially-assigned","kkCountryId":"111","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPh
ysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"f78723d2-da0d-4208-b398-d64225f3db40","alpha2Code":"KP","alpha3Code":"PRK","shortName":"Korea, Democratic People's Republic of","status":"officially-assigned","kkCountryId":"112","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":1,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"77f81edb-1401-4d4a-a221-6e17780dcc64","alpha2Code":"KR","alpha3Code":"KOR","shortName":"Korea, Republic of","status":"officially-assigned","kkCountryId":"113","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"e4f2006174dd3a2b0174f952ffbb141b","alpha2Code":"XK","alpha3Code":"UNK","shortName":"Kosovo","status":"officially-assigned","kkCountryId":"0","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"d309f426-ed06-491e-af88-ae18beb738d3","alpha2Code":"KW","alpha3Code":"KWT","shortName":"Kuwait","status":"officially-assigned","kkCountryId":"114","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"8be878e2-8c54-4b7c-b0b4-45f440cc39a4","alpha2Code":"KG","alpha3Code":"KGZ","shortName":"Kyrgyzstan","status":"officially-assigned","kkCountryId":"115","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"22b8d6c8-a08f-4ce2-b547-c075e7402d5d","alpha2Code":"LA","alpha3Code":"LAO","shortName":"Lao People's Democratic 
Republic","status":"officially-assigned","kkCountryId":"116","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"66842dc2-54f5-4545-8fe9-10825844cd0b","alpha2Code":"LV","alpha3Code":"LVA","shortName":"Latvia","status":"officially-assigned","kkCountryId":"117","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"f2344dfd-803e-438b-b83b-2d83411344f5","alpha2Code":"LB","alpha3Code":"LBN","shortName":"Lebanon","status":"officially-assigned","kkCountryId":"118","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"cf97eb3b-a38a-4d0a-a86d-3c62af7cc102","alpha2Code":"LS","alpha3Code":"LSO","shortName":"Lesotho","status":"officially-assigned","kkCountryId":"119","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0d696510-0141-46c3-a9dd-73032d1dc369","alpha2Code":"LR","alpha3Code":"LBR","shortName":"Liberia","status":"officially-assigned","kkCountryId":"120","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e8da5281-6f13-4500-844c-af6e50a1817d","alpha2Code":"LY","alpha3Code":"LBY","shortName":"Libyan Arab 
Jamahiriya","status":"officially-assigned","kkCountryId":"121","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"9e30228c-05f0-465e-8227-9deb4392ec80","alpha2Code":"LI","alpha3Code":"LIE","shortName":"Liechtenstein","status":"officially-assigned","kkCountryId":"122","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"f18fcd58-f002-4a29-bddd-88ab368390e4","alpha2Code":"LT","alpha3Code":"LTU","shortName":"Lithuania","status":"officially-assigned","kkCountryId":"123","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"bf550d77-9986-49c0-bda6-f8df4a33731b","alpha2Code":"LU","alpha3Code":"LUX","shortName":"Luxembourg","status":"officially-assigned","kkCountryId":"124","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"6142574e-44d7-4df3-b80f-11e0a1ffa8e1","alpha2Code":"MO","alpha3Code":"MAC","shortName":"Macau","status":"officially-assigned","kkCountryId":"125","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"b61d957a-4d6a-46a1-9df6-eafbd496bcc4","alpha2Code":"MK","alpha3Code":"MKD","shortName":"Macedonia","status":"officially-assigned","kkCountryId":"247","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"f9c8f20a-0224-4405-a2b4-2dddeb899216","alpha2Code":"MG","alpha3Code":"MDG","shortName":"Madagascar","status":"officially-assigned"
,"kkCountryId":"127","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a1e24326-20ed-4e87-a4b5-852bcba55fc7","alpha2Code":"MW","alpha3Code":"MWI","shortName":"Malawi","status":"officially-assigned","kkCountryId":"128","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"55e049f1-34d1-4a72-84db-6b20eb50be32","alpha2Code":"MY","alpha3Code":"MYS","shortName":"Malaysia","status":"officially-assigned","kkCountryId":"129","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"b43dce9c-06f7-489e-b21b-1b581fd1b376","alpha2Code":"MV","alpha3Code":"MDV","shortName":"Maldives","status":"officially-assigned","kkCountryId":"130","cupBranch":"D","cupRegion":"D","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"f2186aa4-a2e6-472e-b953-e2cff11e723a","alpha2Code":"ML","alpha3Code":"MLI","shortName":"Mali","status":"officially-assigned","kkCountryId":"131","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"7a0ab8e8-39b3-4db9-bf76-fc6788ab0791","alpha2Code":"MT","alpha3Code":"MLT","shortName":"Malta","status":"officially-assigned","kkCountryId":"132","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"851a02c7-636d-4906-9178-375d5dca7138","alpha2Code":"MH","alpha3Code":"MHL","shortName":"Marshall 
Islands","status":"officially-assigned","kkCountryId":"133","cupBranch":"A","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"33938e2e-84d6-47f9-af01-6544c2556a5f","alpha2Code":"MQ","alpha3Code":"MTQ","shortName":"Martinique","status":"officially-assigned","kkCountryId":"134","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":true},{"id":"ee4f5569-bdd1-407b-957d-d1242217c485","alpha2Code":"MR","alpha3Code":"MRT","shortName":"Mauritania","status":"officially-assigned","kkCountryId":"135","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"9eab61a4-b4e2-4a36-a057-f5db49aea119","alpha2Code":"MU","alpha3Code":"MUS","shortName":"Mauritius","status":"officially-assigned","kkCountryId":"136","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"68048470-cbf1-46b1-8f48-69d4c29c111e","alpha2Code":"YT","alpha3Code":"MYT","shortName":"Mayotte","status":"officially-assigned","kkCountryId":"137","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"2870e1d0-f8d8-44fc-8b14-f7edb8503b86","alpha2Code":"MX","alpha3Code":"MEX","shortName":"Mexico","status":"officially-assigned","kkCountryId":"138","cupBranch":"O","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"7c3a61f7-61e4-4675-af2d-0e42f46cf477","alpha2Code":"FM","alpha3Code":"FSM","shortName":"Micronesia, Federated States 
of","status":"officially-assigned","kkCountryId":"139","cupBranch":"A","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"d6389849-d545-4980-99a2-541aa9b9ee17","alpha2Code":"MD","alpha3Code":"MDA","shortName":"Moldova, Republic of","status":"officially-assigned","kkCountryId":"140","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"379a1832-9838-44d1-b577-184ca5f5daff","alpha2Code":"MC","alpha3Code":"MCO","shortName":"Monaco","status":"officially-assigned","kkCountryId":"141","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"cc8ff54a-a9e7-41f4-b3c6-f63599d081e1","alpha2Code":"MN","alpha3Code":"MNG","shortName":"Mongolia","status":"officially-assigned","kkCountryId":"142","cupBranch":"C","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"6d6467a5-7349-4779-b335-631dda710c21","alpha2Code":"ME","alpha3Code":"MNE","shortName":"Montenegro","status":"officially-assigned","kkCountryId":"246","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a53aa839-afc4-4570-9d12-3733f9afd851","alpha2Code":"MS","alpha3Code":"MSR","shortName":"Montserrat","status":"officially-assigned","kkCountryId":"143","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"6db8c37d-c5a0-43db-812f-539c8ff3dd8a","alpha2Code":"MA","alpha3Code":"MAR","shortName":"Morocco","status":"officially-assigned",
"kkCountryId":"144","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"730b89e1-8ee5-47cf-a292-28758e3a7f20","alpha2Code":"MZ","alpha3Code":"MOZ","shortName":"Mozambique","status":"officially-assigned","kkCountryId":"145","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"5ce552e1-eedc-4350-a4c6-f6a2898def41","alpha2Code":"MM","alpha3Code":"MMR","shortName":"Myanmar","status":"officially-assigned","kkCountryId":"146","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"63b3ee95-fe51-4901-a7ae-fc0bc2cd81df","alpha2Code":"NA","alpha3Code":"NAM","shortName":"Namibia","status":"officially-assigned","kkCountryId":"147","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"d3ae3784-58d2-48ee-a6f1-9749a339c902","alpha2Code":"NR","alpha3Code":"NRU","shortName":"Nauru","status":"officially-assigned","kkCountryId":"148","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"042203fb-c380-4c2b-8d90-f97239823d9f","alpha2Code":"NP","alpha3Code":"NPL","shortName":"Nepal","status":"officially-assigned","kkCountryId":"149","cupBranch":"D","cupRegion":"D","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"8623257e-9ce6-4fd7-aff5-1858ca8c1545","alpha2Code":"NL","alpha3Code":"NLD","shortName":"Netherlands","status":"officially-assigned","kkCountryId":"150","cupBranch":"C","cupRegion":"C","r
egionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"97a6b88a-5cea-4541-ad90-ea14531d1e89","alpha2Code":"AN","alpha3Code":"ANT","shortName":"Netherlands Antilles","status":"officially-assigned","kkCountryId":"151","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"8129284e-e369-4380-be57-644541694140","alpha2Code":"NC","alpha3Code":"NCL","shortName":"New Caledonia","status":"officially-assigned","kkCountryId":"152","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"fa967094-29de-42a2-99e4-2a8deb0eecc3","alpha2Code":"NZ","alpha3Code":"NZL","shortName":"New Zealand","status":"officially-assigned","kkCountryId":"153","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":"Total (incl. 
GST)","sensitiveTerritory":false},{"id":"20c4ce92-62d7-447a-98e7-48722dbc7f8c","alpha2Code":"NI","alpha3Code":"NIC","shortName":"Nicaragua","status":"officially-assigned","kkCountryId":"154","cupBranch":"O","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"39d2607b-d95e-48bc-8722-9af51474faac","alpha2Code":"NE","alpha3Code":"NER","shortName":"Niger","status":"officially-assigned","kkCountryId":"155","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0e6848ae-cbc9-4e95-8c06-193a6025a20a","alpha2Code":"NG","alpha3Code":"NGA","shortName":"Nigeria","status":"officially-assigned","kkCountryId":"156","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"825aee1c-2546-432c-a8ce-fe8533971658","alpha2Code":"NU","alpha3Code":"NIU","shortName":"Niue","status":"officially-assigned","kkCountryId":"157","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"798cfed5-ffd1-40d0-bd63-6ea5438b0511","alpha2Code":"NF","alpha3Code":"NFK","shortName":"Norfolk Island","status":"officially-assigned","kkCountryId":"158","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"e0ea495c-ad0d-4de3-a171-896216b1406a","alpha2Code":"MP","alpha3Code":"MNP","shortName":"Northern Mariana 
Islands","status":"officially-assigned","kkCountryId":"159","cupBranch":"NULL","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"acd087bf-0c83-4c18-83a7-b8d08cdfd5ac","alpha2Code":"NO","alpha3Code":"NOR","shortName":"Norway","status":"officially-assigned","kkCountryId":"160","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"6cafeb12-f2c3-46ef-b9e2-3e5eb010f605","alpha2Code":"OM","alpha3Code":"OMN","shortName":"Oman","status":"officially-assigned","kkCountryId":"161","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"92af6c5d-aa6e-4f19-8260-7a9965860a59","alpha2Code":"PK","alpha3Code":"PAK","shortName":"Pakistan","status":"officially-assigned","kkCountryId":"162","cupBranch":"D","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"cfbd5050-98fe-4def-a217-98cece6f0237","alpha2Code":"PW","alpha3Code":"PLW","shortName":"Palau","status":"officially-assigned","kkCountryId":"163","cupBranch":"NULL","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"bb3310cf-b407-416e-b3d9-523e2ce1d794","alpha2Code":"PS","alpha3Code":"PSE","shortName":"Palestinian Territory, 
Occupied","status":"officially-assigned","kkCountryId":"248","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"e2f744c2-e5d0-491e-aa88-1421e18615bc","alpha2Code":"PA","alpha3Code":"PAN","shortName":"Panama","status":"officially-assigned","kkCountryId":"164","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"2688a966-22e0-4d18-b5f0-c1e2e2ad65d0","alpha2Code":"PG","alpha3Code":"PNG","shortName":"Papua New Guinea","status":"officially-assigned","kkCountryId":"165","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"9f7b77ce-3572-417d-bbbf-22d90f13f704","alpha2Code":"PY","alpha3Code":"PRY","shortName":"Paraguay","status":"officially-assigned","kkCountryId":"166","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"4da3ee97-6df0-470d-98cf-562df8c8d164","alpha2Code":"PE","alpha3Code":"PER","shortName":"Peru","status":"officially-assigned","kkCountryId":"167","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"0f106ec3-9de4-4f85-bb09-26d7830bbdf4","alpha2Code":"PH","alpha3Code":"PHL","shortName":"Philippines","status":"officially-assigned","kkCountryId":"168","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"9c5b6ddf-210d-44ff-82b6-9ad51f915b83","alpha2Code":"PN","alpha3Code":"PCN","shortName":"Pitcairn","status":"officially-assigned","kk
CountryId":"169","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"927b1ada-bf62-47c5-bbc0-df990fb2f818","alpha2Code":"PL","alpha3Code":"POL","shortName":"Poland","status":"officially-assigned","kkCountryId":"170","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"9b7febfc-d8ae-417c-9d43-e48a46dc4da8","alpha2Code":"PT","alpha3Code":"PRT","shortName":"Portugal","status":"officially-assigned","kkCountryId":"171","cupBranch":"I","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"7bdc01da-9217-4e89-9976-fa3c171419bd","alpha2Code":"PR","alpha3Code":"PRI","shortName":"Puerto Rico","status":"officially-assigned","kkCountryId":"172","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"988553d3-7531-425e-9de8-94e45fdd9d18","alpha2Code":"QA","alpha3Code":"QAT","shortName":"Qatar","status":"officially-assigned","kkCountryId":"173","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"3afbb3fb-e115-43d4-b08e-cf0924bd652e","alpha2Code":"RE","alpha3Code":"REU","shortName":"Reunion","status":"officially-assigned","kkCountryId":"174","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"a7590f7b-5da8-4887-aeee-f62ac89e7d73","alpha2Code":"RO","alpha3Code":"ROM","shortName":"Romania","status":"officially-assigned","kkCountryId":"175","cupBranch":"C","cupRegion":"C","regio
nCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"927c85b7-545c-4c47-bf16-dece1a52bf76","alpha2Code":"RU","alpha3Code":"RUS","shortName":"Russian Federation","status":"officially-assigned","kkCountryId":"176","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":1,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"0142ee9b-ef89-4a7f-8e5d-1560f14a849d","alpha2Code":"RW","alpha3Code":"RWA","shortName":"Rwanda","status":"officially-assigned","kkCountryId":"177","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"26ae3d8f-411c-4617-9e9f-e0240920ccff","alpha2Code":"KN","alpha3Code":"KNA","shortName":"Saint Kitts and Nevis","status":"officially-assigned","kkCountryId":"178","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"30aacdef-10e4-43bb-bc98-4a6764cb1636","alpha2Code":"LC","alpha3Code":"LCA","shortName":"Saint Lucia","status":"officially-assigned","kkCountryId":"179","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"8a4e3f48-2458-4cd1-ade0-36e6a51d3508","alpha2Code":"VC","alpha3Code":"VCT","shortName":"Saint Vincent and the 
Grenadines","status":"officially-assigned","kkCountryId":"180","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"40904289-77be-48b6-94fc-962f1f4af37d","alpha2Code":"WS","alpha3Code":"WSM","shortName":"Samoa","status":"officially-assigned","kkCountryId":"181","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"697b639c-700f-4829-aafd-0aef39d0b2ae","alpha2Code":"SM","alpha3Code":"SMR","shortName":"San Marino","status":"officially-assigned","kkCountryId":"182","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"620b7524-57af-4fc6-a18d-84983311633b","alpha2Code":"ST","alpha3Code":"STP","shortName":"Sao Tome and Principe","status":"officially-assigned","kkCountryId":"183","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"831a8227-0d4c-4bef-991b-d81f06aafbf4","alpha2Code":"SA","alpha3Code":"SAU","shortName":"Saudi 
Arabia","status":"officially-assigned","kkCountryId":"184","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"0927ed95-6d83-450f-b038-f2960f2dad79","alpha2Code":"SN","alpha3Code":"SEN","shortName":"Senegal","status":"officially-assigned","kkCountryId":"185","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"d99ef40c-1a85-4077-881d-2f4c7c6f3046","alpha2Code":"RS","alpha3Code":"SRB","shortName":"Serbia","status":"officially-assigned","kkCountryId":"249","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"7bb7c2ce-5a84-4f0b-9ed4-d054ff932a0d","alpha2Code":"SC","alpha3Code":"SYC","shortName":"Seychelles","status":"officially-assigned","kkCountryId":"186","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"5ae5bac7-79d4-4073-9f1f-7e9961b85b52","alpha2Code":"SL","alpha3Code":"SLE","shortName":"Sierra 
Leone","status":"officially-assigned","kkCountryId":"187","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"bc064ea8-099f-4a8a-a386-48431dabd194","alpha2Code":"SG","alpha3Code":"SGP","shortName":"Singapore","status":"officially-assigned","kkCountryId":"188","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"84e588f5-00f9-4992-a722-8bd05a47a68a","alpha2Code":"SK","alpha3Code":"SVK","shortName":"Slovakia","status":"officially-assigned","kkCountryId":"250","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"ec5419a0-4f83-46e5-9f14-653580687402","alpha2Code":"SI","alpha3Code":"SVN","shortName":"Slovenia","status":"officially-assigned","kkCountryId":"190","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"e034000d-6fb7-4420-a23e-09af7c2ef1a5","alpha2Code":"SB","alpha3Code":"SLB","shortName":"Solomon Islands","status":"officially-assigned","kkCountryId":"191","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"b4e18b13-68f5-4680-b634-c03793a9287b","alpha2Code":"SO","alpha3Code":"SOM","shortName":"Somalia","status":"officially-assigned","kkCountryId":"192","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"11227fdf-051a-4520-89ef-315a5a39ec16","alpha2Code":"ZA","alpha3Code":"ZAF","shortName":"South 
Africa","status":"officially-assigned","kkCountryId":"193","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"2605c280-574a-4670-bb14-93f0ee29063f","alpha2Code":"GS","alpha3Code":"SGS","shortName":"South Georgia and the South Sandwich Islands","status":"officially-assigned","kkCountryId":"194","cupBranch":"N","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e4f2006174dd3a2b0174f94f00cc140b","alpha2Code":"SS","alpha3Code":"SSD","shortName":"South Sudan","status":"officially-assigned","kkCountryId":"0","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"27f58fe2-7c39-486f-ac6a-26da3b68225b","alpha2Code":"ES","alpha3Code":"ESP","shortName":"Spain","status":"officially-assigned","kkCountryId":"195","cupBranch":"I","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"9fe0fd0a-27d6-4723-8ceb-eaf40b45f064","alpha2Code":"LK","alpha3Code":"LKA","shortName":"Sri Lanka","status":"officially-assigned","kkCountryId":"196","cupBranch":"D","cupRegion":"D","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"8740a5ed-79e0-42a5-9769-6e6f035ff78b","alpha2Code":"SH","alpha3Code":"SHN","shortName":"St. Helena","status":"officially-assigned","kkCountryId":"197","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"16b75816-519b-43d4-81a4-10292ea39d1a","alpha2Code":"PM","alpha3Code":"SPM","shortName":"St. 
Pierre and Miquelon","status":"officially-assigned","kkCountryId":"198","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"5d27adfe-e461-4a2e-8703-5ec9ac6c1241","alpha2Code":"SD","alpha3Code":"SDN","shortName":"Sudan","status":"officially-assigned","kkCountryId":"199","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"6a7a0bf9-1b4d-4cc9-bf4f-fcef994b6716","alpha2Code":"SR","alpha3Code":"SUR","shortName":"Suriname","status":"officially-assigned","kkCountryId":"200","cupBranch":"Q","cupRegion":"Q","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"874d22e5-1e92-4125-87ab-3471711ee562","alpha2Code":"SJ","alpha3Code":"SJM","shortName":"Svalbard and Jan Mayen 
Islands","status":"officially-assigned","kkCountryId":"201","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"233630f3-d0d7-46df-836b-cef65edd0705","alpha2Code":"SZ","alpha3Code":"SWZ","shortName":"Swaziland","status":"officially-assigned","kkCountryId":"202","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"161e0c12-5539-472d-8681-47d12e0f7c87","alpha2Code":"SE","alpha3Code":"SWE","shortName":"Sweden","status":"officially-assigned","kkCountryId":"203","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"e418e58e-6977-42f0-90f9-1edeb74c9620","alpha2Code":"CH","alpha3Code":"CHE","shortName":"Switzerland","status":"officially-assigned","kkCountryId":"204","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"970c958c-d257-4243-996c-5cf54a892450","alpha2Code":"SY","alpha3Code":"SYR","shortName":"Syrian Arab 
Republic","status":"officially-assigned","kkCountryId":"205","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":1,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"bb572853-7dfb-45b9-8d72-5d65f614e2d1","alpha2Code":"TW","alpha3Code":"TWN","shortName":"Taiwan","status":"officially-assigned","kkCountryId":"206","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":true},{"id":"115100c9-86b0-481e-9960-b19253485425","alpha2Code":"TJ","alpha3Code":"TJK","shortName":"Tajikistan","status":"officially-assigned","kkCountryId":"207","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"43093a64-4dfb-49a0-ac35-a81d3ba8680d","alpha2Code":"TZ","alpha3Code":"TZA","shortName":"Tanzania, United Republic of","status":"officially-assigned","kkCountryId":"208","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"9de26d08-cbd0-46dd-95c3-d26c11b91ad9","alpha2Code":"TH","alpha3Code":"THA","shortName":"Thailand","status":"officially-assigned","kkCountryId":"209","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"e4f2006174dd3a2b0174f938e23413f1","alpha2Code":"TL","alpha3Code":"TLS","shortName":"Timor-Leste","status":"officially-assigned","kkCountryId":"0","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"396a394b-48bd-491d-8783-8e92c49cbf9c","alpha2Code":"TG","alpha3Code":"TGO","shortName":"Togo","status":"officially-assigned
","kkCountryId":"210","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"c0ea7925-a0ec-4827-b13d-d7dc3666132a","alpha2Code":"TK","alpha3Code":"TKL","shortName":"Tokelau","status":"officially-assigned","kkCountryId":"211","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"7f008c68-9ed3-444d-9fb7-fdcd6def32e8","alpha2Code":"TO","alpha3Code":"TON","shortName":"Tonga","status":"officially-assigned","kkCountryId":"212","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"6cc3a480-5cc3-4aaa-b464-3a4638710353","alpha2Code":"TT","alpha3Code":"TTO","shortName":"Trinidad and Tobago","status":"officially-assigned","kkCountryId":"213","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"f8df523a-eb8b-4971-b5c0-d46bfc28d25e","alpha2Code":"TN","alpha3Code":"TUN","shortName":"Tunisia","status":"officially-assigned","kkCountryId":"214","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"c92bc644-fdae-4111-9393-78a5d845a49d","alpha2Code":"TR","alpha3Code":"TUR","shortName":"Türkiye","status":"officially-assigned","kkCountryId":"215","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"63efbcff-1e4b-4821-99c1-d67174df50db","alpha2Code":"TM","alpha3Code":"TKM","shortName":"Turkmenistan","status":"officially-assigned","kkCountryId":"216","cupBranch":"C","cu
pRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"560a128b-dabd-411b-ab88-2c3718176995","alpha2Code":"TC","alpha3Code":"TCA","shortName":"Turks and Caicos Islands","status":"officially-assigned","kkCountryId":"217","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"48917d67-0131-4a0d-ba34-34b7844e4f99","alpha2Code":"TV","alpha3Code":"TUV","shortName":"Tuvalu","status":"officially-assigned","kkCountryId":"218","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"63c2a03c-2ef3-427d-8fc4-1c5fdd81cb28","alpha2Code":"UG","alpha3Code":"UGA","shortName":"Uganda","status":"officially-assigned","kkCountryId":"219","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a25acd1f-1576-4eed-99f3-e690fa8fcdaf","alpha2Code":"UA","alpha3Code":"UKR","shortName":"Ukraine","status":"officially-assigned","kkCountryId":"220","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"5b7988cb-c044-4d74-9eda-3262b689088f","alpha2Code":"AE","alpha3Code":"ARE","shortName":"United Arab Emirates","status":"officially-assigned","kkCountryId":"221","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"ba985020-38b1-45d7-89bd-fc95a625e0c7","alpha2Code":"GB","alpha3Code":"GBR","shortName":"United 
Kingdom","status":"officially-assigned","kkCountryId":"222","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"a22b6cd6-8a7d-404a-bfc3-f28fe1cc9df1","alpha2Code":"US","alpha3Code":"USA","shortName":"United States","status":"officially-assigned","kkCountryId":"223","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"230a425e-b19e-47ae-b07f-e6418807972b","alpha2Code":"UM","alpha3Code":"UMI","shortName":"United States Minor Outlying Islands","status":"officially-assigned","kkCountryId":"224","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"9379fd2c-ce6c-458c-b61d-8a554732ce5c","alpha2Code":"VI","alpha3Code":"VIR","shortName":"United States Virgin 
Islands","status":"officially-assigned","kkCountryId":"253","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"d282e488-3ecc-45ab-b72c-81dc0c885956","alpha2Code":"UY","alpha3Code":"URY","shortName":"Uruguay","status":"officially-assigned","kkCountryId":"225","cupBranch":"N","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"183122de-4fd8-4b1d-821d-ab89ddd09811","alpha2Code":"UZ","alpha3Code":"UZB","shortName":"Uzbekistan","status":"officially-assigned","kkCountryId":"226","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"9971d148-7a5b-454e-ac18-3c19822988b3","alpha2Code":"VU","alpha3Code":"VUT","shortName":"Vanuatu","status":"officially-assigned","kkCountryId":"227","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"6475d530-8e6d-4abd-a7f0-f4b382cb6db2","alpha2Code":"VA","alpha3Code":"VAT","shortName":"Vatican 
City","status":"officially-assigned","kkCountryId":"251","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"EUR","ecommLabel":null,"sensitiveTerritory":false},{"id":"9a5b0169-d1c8-4c10-8fa2-32b4ecac4987","alpha2Code":"VE","alpha3Code":"VEN","shortName":"Venezuela","status":"officially-assigned","kkCountryId":"229","cupBranch":"Q","cupRegion":"N","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"745aab4b-9973-41c0-917f-0638e51f3b57","alpha2Code":"VN","alpha3Code":"VNM","shortName":"Vietnam","status":"officially-assigned","kkCountryId":"252","cupBranch":"E","cupRegion":"E","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"9db772a9-22f5-43a9-b935-68ceb56a5a1c","alpha2Code":"VG","alpha3Code":"VGB","shortName":"Virgin Islands (British)","status":"officially-assigned","kkCountryId":"231","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"7bb4a17e-0a63-4141-9e19-b7851eb72937","alpha2Code":"WF","alpha3Code":"WLF","shortName":"Wallis and Futuna Islands","status":"officially-assigned","kkCountryId":"233","cupBranch":"A","cupRegion":"A","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"4a6f26bc-75ad-4d14-a9b8-689a4ec2c55d","alpha2Code":"EH","alpha3Code":"ESH","shortName":"Western 
Sahara","status":"officially-assigned","kkCountryId":"234","cupBranch":"NULL","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false},{"id":"9e2e344d-f7e3-4ae6-b10c-7f9acd4e7d16","alpha2Code":"YE","alpha3Code":"YEM","shortName":"Yemen","status":"officially-assigned","kkCountryId":"235","cupBranch":"C","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"USD","ecommLabel":null,"sensitiveTerritory":false},{"id":"d0a991ff-3d63-4e93-9f2f-09261a2a874c","alpha2Code":"ZM","alpha3Code":"ZMB","shortName":"Zambia","status":"officially-assigned","kkCountryId":"238","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":0,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":true},{"id":"1f6641ed-117b-4cc6-8880-09b4eb72ae4c","alpha2Code":"ZW","alpha3Code":"ZWE","shortName":"Zimbabwe","status":"officially-assigned","kkCountryId":"239","cupBranch":"R","cupRegion":"C","regionCodes":[],"embargoedCountryDigital":0,"embargoedCountryPhysical":1,"currency":"GBP","ecommLabel":null,"sensitiveTerritory":false}]}</script> </div> <platform-footer id='platform-footer-wc' platform='core' env='prod' class='platform-footer-wc' is-preview='false' style="display: none" ></platform-footer> <script> const platformFooter = $('#platform-footer'); const platformFooterWc = $('#platform-footer-wc'); platformFooterWc.prop('initialData', window.__PLATFORM_FOOTER_DATA__); platformFooterWc.on('initialized', function () { platformFooter.hide(); platformFooterWc.show(); }); platformFooterWc.on('update-location', function (event) { $.post(AOP.baseUrl + '/services/country/override', { countryCode: event.originalEvent.detail[0].alpha2Code }, function () { window.location.reload(); window.scrollTo(0, document.body.scrollHeight); // scroll the bottom of the page }) }) </script> <script 
src="/core/system/public/bower_components/foundation/js/foundation.js"></script> <script src="/core/cambridge-core/public/bower_components/jquery-lazy/jquery.lazy.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/jquery.autocomplete.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/slick.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/jquery.qtip.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/foggy.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/jquery.cookie.js"></script> <script src="/core/cambridge-core/public/js/plugins/jquery.textresizer.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/chosen.jquery.min.js"></script> <script src="/core/cambridge-core/public/js/plugins/jquery.sticky.js"></script> <script type="text/javascript"> var AOP = AOP || {}; </script> <!-- TODO CHECK --> <!-- system footer start --> <script type="text/javascript" language="javascript"> window.AOP = window.AOP || {}; window.AOP.pageId = ''; window.AOP.baseUrl = '/core'; window.AOP.env = 'prod'; window.AOP.shouldUseCitationTool = true; </script> <script src="/core/system/public/js/foundationUtils.js" language="javascript" type="text/javascript"></script> <script src="/core/system/public/js/confirmModal.js" language="javascript" type="text/javascript"></script> <script src="/core/system/public/js/trimWhitespace.js" language="javascript" type="text/javascript"></script> <script src="/core/system/public/js/loadScript.js" language="javascript" type="text/javascript"></script> <script src="/core/system/public/js/isArray.js"></script> <script src="/core/system/public/js/parseQuerystring.js"></script> <script src="/core/system/public/js/createAlertBox.js"></script> <script src="/core/system/public/js/countryPicker.js"></script> <div id="confirmModalWrapper"> <div id="confirm-modal" class="reveal-modal small" data-reveal role="dialog" aria-labelledby="confirmModalHeader"> 
<div class="wrapper"> <h1 class="title" id="confirmModalHeader"></h1> <div class="row"> <div class="large-12 columns"> <div class="panel callout message"> </div> </div> </div> <div class="row margin-top"> <div class="small-12 large-6 columns"><a href="#" class="right small button radius transparent cancel">Cancel</a></div> <div class="small-12 large-6 columns"><a href="#" class="left small radius button blue confirm">Confirm</a></div> </div> </div> <a href="#" class="close-reveal-modal"><span aria-hidden="true">×</span></a> </div> </div><!-- CMS Includes --> <script src="/aca/shared-elements/platform-header-footer.umd.js" language="javascript"></script> <script src="/core/cambridge-core/public/js/ecommerce/add-to-basket.js?v=v7.337.1-hotfix" language="javascript"></script> <script src="/core/system/public/js/validation.js" language="javascript" type="text/javascript"></script> <script src="/core/system/public/js/revealModal.js" language="javascript" type="text/javascript"></script> <!-- system footer finish --> <!-- Build: unreadable --> <script type="text/javascript" language="javascript"> window.AOP = window.AOP || {}; window.AOP.webTrendsFpcdom = ''; window.AOP.webtrendsSourceId = ''; window.AOP.oracleInfinityAccountId = ''; window.AOP.isInternalTraffic = false; </script> <script src="/core/cambridge-core/public/js/jquery-extend.js?v=v7.337.1-hotfix"></script> <script src="/core/cambridge-core/public/js/additional.js?v=v7.337.1-hotfix"></script> <script src="/core/cambridge-core/public/js/multilingual.js?v=v7.337.1-hotfix"></script> <script src="/core/cambridge-core/public/js/analytics/cup-events.js?v=v7.337.1-hotfix"></script> <script src="/core/cambridge-core/public/js/app.js?v=v7.337.1-hotfix"></script> <script src="/core/cambridge-core/public/js/accessibility.js?v=v7.337.1-hotfix"></script> <script src="/core/cambridge-core/public/js/analytics/listeners/counter.js?v=v7.337.1-hotfix"></script> <script 
src="/core/cambridge-core/public/js/ga-events.js?v=v7.337.1-hotfix"></script> <script type="text/javascript"> $(document).ready(function () { if (typeof $(document).cupEvent === 'function') { $(document).cupEvent(); } AOP.initCounterReporter({ apiKey: 'qzD90fb0r257ZKVtGSFnG3UucyyMdgvu8u250Fa0', apiUrl: 'https://usage.prod.aop.cambridge.org/v1/events', applicationId: '8a94020952a738f80152b223c992000b', identities: [], authenticationMethods: [], sessionId: 'aIjf27dbr-H08jop5BJpD-HHFiAExEgw', eventContext: '/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847' }) }); </script> <script>(function(){function c(){var b=a.contentDocument||a.contentWindow.document;if(b){var d=b.createElement('script');d.innerHTML="window.__CF$cv$params={r:'8e738410bfb2ce55',t:'MTczMjM4OTYyMi4wMDAwMDA='};var a=document.createElement('script');a.nonce='';a.src='/cdn-cgi/challenge-platform/scripts/jsd/main.js';document.getElementsByTagName('head')[0].appendChild(a);";b.getElementsByTagName('head')[0].appendChild(d)}}if(document.body){var a=document.createElement('iframe');a.height=1;a.width=1;a.style.position='absolute';a.style.top=0;a.style.left=0;a.style.border='none';a.style.visibility='hidden';document.body.appendChild(a);if('loading'!==document.readyState)c();else if(window.addEventListener)document.addEventListener('DOMContentLoaded',c);else{var e=document.onreadystatechange||function(){};document.onreadystatechange=function(b){e(b);'loading'!==document.readyState&&(document.onreadystatechange=e,c())}}}})();</script></body> </html> <div id="kindleModal" class="reveal-modal large" data-reveal> <div class="header"> <h1 class="heading_07">Save article to Kindle</h1> </div> <div class="panel"> <div class="row"> <div class="large-12 columns margin-bottom"> <p>To save this article to your Kindle, first ensure coreplatform@cambridge.org is added to your Approved Personal Document E-mail List under your Personal Document Settings on the Manage Your Content and 
Devices page of your Amazon account. Then enter the ‘name’ part of your Kindle email address below. <a href="/core/help">Find out more about saving to your Kindle</a>. </p> <p> Note you can select to save to either the @free.kindle.com or @kindle.com variations. ‘@free.kindle.com’ emails are free but can only be saved to your device when it is connected to wi-fi. ‘@kindle.com’ emails can be delivered even when you are not connected to wi-fi, but note that service fees apply. </p> <p> Find out more about the <a href="https://www.amazon.com/gp/help/customer/display.html/ref=kinw_myk_wl_ln?ie=UTF8&nodeId=200767340#fees" target="_blank">Kindle Personal Document Service.</a> </p> </div> </div> <div class="row book-title"> <div class="small-12 columns"> <div class="heading_08"><div class="title">Robotics goes PRISMA</div></div> </div> </div> <div class="row book-details book-overview"> <div class="small-12 columns"> <ul> <li class="volume-issue"></li> <li class="author"> <a href="/core/search?filters%5BauthorTerms%5D=Mario Selvaggio&eventCode=SE-AU" class="more-by-this-author ">Mario Selvaggio</a><a target="_blank" class="orcid-logo " href="https://orcid.org/0000-0002-2460-1914" aria-label="Open the ORCID record for Mario Selvaggio in new tab/window"></a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Rocco Moccia&eventCode=SE-AU" class="more-by-this-author ">Rocco Moccia</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Pierluigi Arpenti&eventCode=SE-AU" class="more-by-this-author ">Pierluigi Arpenti</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Riccardo Caccavale&eventCode=SE-AU" class="more-by-this-author ">Riccardo Caccavale</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a 
href="/core/search?filters%5BauthorTerms%5D=Fabio Ruggiero&eventCode=SE-AU" class="more-by-this-author ">Fabio Ruggiero</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Jonathan Cacace&eventCode=SE-AU" class="more-by-this-author ">Jonathan Cacace</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Fanny Ficuciello&eventCode=SE-AU" class="more-by-this-author ">Fanny Ficuciello</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Alberto Finzi&eventCode=SE-AU" class="more-by-this-author ">Alberto Finzi</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Vincenzo Lippiello&eventCode=SE-AU" class="more-by-this-author ">Vincenzo Lippiello</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Luigi Villani&eventCode=SE-AU" class="more-by-this-author ">Luigi Villani</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator "> and </span><a href="/core/search?filters%5BauthorTerms%5D=Bruno Siciliano&eventCode=SE-AU" class="more-by-this-author ">Bruno Siciliano</a> <sup data-affiliation-id="a1" class="">(a1)</sup> </li> <li class="meta-info">DOI: <a href="https://doi.org/10.1017/S026357472400033X" target="_blank" class="url">https://doi.org/10.1017/S026357472400033X</a></li> </ul> </div> </div> </div> <div class="row wrapper no-padding-top"> <div class="small-12 columns"> <div class="row margin-top"> <div class="large-12 columns"> <form id="sendTokindleForm" action="/core/services/aop-cambridge-core/sendto" method="GET"> <fieldset> <legend> <label for="kindleEmailAddress">Your Kindle email address</label> </legend> <div class="row"> <div class="large-6 
columns"> <input name="kindleEmailAddress" id="kindleEmailAddress" type="text" value="" required="required"/> <small class="error kindleEmailAddress" style="display:none">Please provide your Kindle email.</small> </div> <div class="large-6 columns"> <label class="dInlineBlock"><input name="kindleEmailDomain" type="radio" checked="checked" value="free.kindle.com" />@free.kindle.com</label> <label class="dInlineBlock"><input name="kindleEmailDomain" type="radio" value="kindle.com" />@kindle.com (<a href="https://www.amazon.com/gp/help/customer/display.html/ref=kinw_myk_wl_ln?ie=UTF8&nodeId=200767340#fees" target="_blank">service fees apply</a>)</label> </div> </div> </fieldset> <div class="row"> <div class="large-12 columns"> <fieldset> <legend>Available formats</legend> <label> <input type="checkbox" name="formats" class="formats" value="pdf" required /> PDF </label> <small class="error formats" style="display:none">Please select a format to save.</small> </fieldset> </div> </div> <div class="row"> <div class="large-12 columns"> <label for="usagePolicykindle"> <input type="checkbox" name="usagePolicykindle" id="usagePolicykindle" value="Usage" required/> By using this service, you agree that you will only keep content for personal use, and will not openly distribute them via Dropbox, Google Drive or other file sharing services <small class="error usagePolicy" style="display:none">Please confirm that you accept the terms of use.</small> </label> </div> </div> <input type="hidden" name="suppressTrackingEvent" value="false" /> <input type="hidden" name="service" value="kindle" /> <input type="hidden" name="documents" value="5DA1E6B0701411F71E5FFC40F2E53847" /> <input type="hidden" name="finalReturn" value="/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" /> <input type="hidden" name="_csrf" value="sRpck057-QH20_Gw4aG4x5CvlfZjo_GbDahE"> </form> <!-- Submit button start --> <div class="row margin-top"> <div class="small-6 large-6 
columns"> <button type="button" class="button radius transparent right closeModal">Cancel</button> </div> <div class="small-6 large-6 columns"> <button type="submit" onClick="$('#sendTokindleForm').submit();" class="button blue radius left"> Save </button> </div> </div> <!-- Submit button end --> </div> </div> </div> </div> <a class="close-reveal-modal">×</a> </div> <script type="text/javascript">
// Client-side validation for the "Save to Kindle" form (#sendTokindleForm).
// Each validate* helper returns TRUE when the field is in error (note the
// inverted sense vs. the usual "isValid" convention).
$('#sendTokindleForm').on('submit', validatekindleForm);
var _formkindle = $('#sendTokindleForm');

// Hide all inline error messages for the Kindle form.
function resetValidationkindleForm() {
  var _form = _formkindle;
  _form.find('.error.formats').hide();
  _form.find('.error.usagePolicy').hide();
}

// True when no "Available formats" checkbox is ticked; toggles its error text.
function validatekindleFormats(e) {
  var _form = _formkindle;
  var errors = false;
  var formats = _form.find('input[name="formats"]:checked');
  if (formats.length < 1) {
    _form.find('.error.formats').show();
    errors = true;
  } else {
    _form.find('.error.formats').hide();
  }
  return errors;
}

// True when the usage-policy checkbox is not ticked; toggles its error text.
function validatekindleUsagePolicy(e) {
  var _form = _formkindle;
  var usagePolicy = _form.find('input[name="usagePolicykindle"]');
  var errors = false;
  if (!usagePolicy.is(':checked')) {
    _form.find('.error.usagePolicy').show();
    errors = true;
  } else {
    _form.find('.error.usagePolicy').hide();
  }
  return errors;
}

// True when the Kindle email field is empty or whitespace-only;
// toggles both the error message and the input's error class.
function validateKindleEmail(e) {
  var _form = _formkindle;
  var kindleEmailAddress = _form.find('input[name="kindleEmailAddress"]');
  var errors = false;
  if (!kindleEmailAddress.val().trim()) {
    _form.find('.error.kindleEmailAddress').show();
    kindleEmailAddress.addClass('error');
    errors = true;
  } else {
    _form.find('.error.kindleEmailAddress').hide();
    kindleEmailAddress.removeClass('error');
  }
  return errors;
}

// Submit handler: run every check, and for each failing field re-validate
// once on its next 'change' event. Returning false blocks submission.
function validatekindleForm(e) {
  var _form = _formkindle;
  var resultPolicy = validatekindleUsagePolicy();
  var resultFormats = validatekindleFormats();
  if (resultPolicy) {
    $('#usagePolicykindle', _form).one('change', validatekindleUsagePolicy);
  }
  if (resultFormats) {
    $('.formats', _form).one('change', validatekindleFormats);
  }
  var resultKindleEmail = validateKindleEmail(e);
  if (resultKindleEmail) {
    $('input[name="kindleEmailAddress"]', _form).one('change', validateKindleEmail);
  }
  return !resultPolicy && !resultFormats && !resultKindleEmail;
}

$(document).ready(function () {
  // Reset validation when a reveal modal opens (Foundation fires this
  // for every [data-reveal] modal, not only the Kindle one).
  $(document).on('open.fndtn.reveal', '[data-reveal]', function () {
    resetValidationkindleForm();
    // Automatically select the format if there's only one available.
    if ($('#sendTokindleForm input[name="formats"]').length === 1) {
      $('#sendTokindleForm input[name="formats"]').prop('checked', true);
    }
  });
});
</script><div id="dropboxModal" class="reveal-modal large" data-reveal> <div class="header"> <h1 class="heading_07">Save article to Dropbox</h1> </div> <div class="panel"> <div class="row"> <div class="large-12 columns margin-bottom"> <p> To save this article to your Dropbox account, please select one or more formats and confirm that you agree to abide by our usage policies. If this is the first time you used this feature, you will be asked to authorise Cambridge Core to connect with your Dropbox account. <a href="/core/help">Find out more about saving content to Dropbox</a>. 
</p> </div> </div> <div class="row book-title"> <div class="small-12 columns"> <div class="heading_08"><div class="title">Robotics goes PRISMA</div></div> </div> </div> <div class="row book-details book-overview"> <div class="small-12 columns"> <ul> <li class="volume-issue"></li> <li class="author"> <a href="/core/search?filters%5BauthorTerms%5D=Mario Selvaggio&eventCode=SE-AU" class="more-by-this-author ">Mario Selvaggio</a><a target="_blank" class="orcid-logo " href="https://orcid.org/0000-0002-2460-1914" aria-label="Open the ORCID record for Mario Selvaggio in new tab/window"></a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Rocco Moccia&eventCode=SE-AU" class="more-by-this-author ">Rocco Moccia</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Pierluigi Arpenti&eventCode=SE-AU" class="more-by-this-author ">Pierluigi Arpenti</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Riccardo Caccavale&eventCode=SE-AU" class="more-by-this-author ">Riccardo Caccavale</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Fabio Ruggiero&eventCode=SE-AU" class="more-by-this-author ">Fabio Ruggiero</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Jonathan Cacace&eventCode=SE-AU" class="more-by-this-author ">Jonathan Cacace</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Fanny Ficuciello&eventCode=SE-AU" class="more-by-this-author ">Fanny Ficuciello</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a 
href="/core/search?filters%5BauthorTerms%5D=Alberto Finzi&eventCode=SE-AU" class="more-by-this-author ">Alberto Finzi</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Vincenzo Lippiello&eventCode=SE-AU" class="more-by-this-author ">Vincenzo Lippiello</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Luigi Villani&eventCode=SE-AU" class="more-by-this-author ">Luigi Villani</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator "> and </span><a href="/core/search?filters%5BauthorTerms%5D=Bruno Siciliano&eventCode=SE-AU" class="more-by-this-author ">Bruno Siciliano</a> <sup data-affiliation-id="a1" class="">(a1)</sup> </li> <li class="meta-info">DOI: <a href="https://doi.org/10.1017/S026357472400033X" target="_blank" class="url">https://doi.org/10.1017/S026357472400033X</a></li> </ul> </div> </div> </div> <div class="row wrapper no-padding-top"> <div class="small-12 columns"> <div class="row margin-top"> <div class="large-12 columns"> <form id="sendTodropboxForm" action="/core/services/aop-cambridge-core/sendto" method="GET"> <div class="row"> <div class="large-12 columns"> <fieldset> <legend>Available formats</legend> <label> <input type="checkbox" name="formats" class="formats" value="pdf" required /> PDF </label> <small class="error formats" style="display:none">Please select a format to save.</small> </fieldset> </div> </div> <div class="row"> <div class="large-12 columns"> <label for="usagePolicydropbox"> <input type="checkbox" name="usagePolicydropbox" id="usagePolicydropbox" value="Usage" required/> By using this service, you agree that you will only keep content for personal use, and will not openly distribute them via Dropbox, Google Drive or other file sharing services <small class="error usagePolicy" style="display:none">Please confirm that you accept the terms of 
use.</small> </label> </div> </div> <input type="hidden" name="suppressTrackingEvent" value="false" /> <input type="hidden" name="service" value="dropbox" /> <input type="hidden" name="documents" value="5DA1E6B0701411F71E5FFC40F2E53847" /> <input type="hidden" name="finalReturn" value="/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" /> <input type="hidden" name="_csrf" value="sRpck057-QH20_Gw4aG4x5CvlfZjo_GbDahE"> </form> <!-- Submit button start --> <div class="row margin-top"> <div class="small-6 large-6 columns"> <button type="button" class="button radius transparent right closeModal">Cancel</button> </div> <div class="small-6 large-6 columns"> <button type="submit" onClick="$('#sendTodropboxForm').submit();" class="button blue radius left"> Save </button> </div> </div> <!-- Submit button end --> </div> </div> </div> </div> <a class="close-reveal-modal">×</a> </div> <script type="text/javascript">
// Client-side validation for the "Save to Dropbox" form (#sendTodropboxForm).
// Each validate* helper returns TRUE when the field is in error.
$('#sendTodropboxForm').on('submit', validatedropboxForm);
var _formdropbox = $('#sendTodropboxForm');

// Hide all inline error messages for the Dropbox form.
function resetValidationdropboxForm() {
  var _form = _formdropbox;
  _form.find('.error.formats').hide();
  _form.find('.error.usagePolicy').hide();
}

// True when no "Available formats" checkbox is ticked; toggles its error text.
function validatedropboxFormats(e) {
  var _form = _formdropbox;
  var errors = false;
  var formats = _form.find('input[name="formats"]:checked');
  if (formats.length < 1) {
    _form.find('.error.formats').show();
    errors = true;
  } else {
    _form.find('.error.formats').hide();
  }
  return errors;
}

// True when the usage-policy checkbox is not ticked; toggles its error text.
function validatedropboxUsagePolicy(e) {
  var _form = _formdropbox;
  var usagePolicy = _form.find('input[name="usagePolicydropbox"]');
  var errors = false;
  if (!usagePolicy.is(':checked')) {
    _form.find('.error.usagePolicy').show();
    errors = true;
  } else {
    _form.find('.error.usagePolicy').hide();
  }
  return errors;
}

// Submit handler: run every check, and for each failing field re-validate
// once on its next 'change' event. Returning false blocks submission.
function validatedropboxForm(e) {
  var _form = _formdropbox;
  var resultPolicy = validatedropboxUsagePolicy();
  var resultFormats = validatedropboxFormats();
  if (resultPolicy) {
    $('#usagePolicydropbox', _form).one('change', validatedropboxUsagePolicy);
  }
  if (resultFormats) {
    $('.formats', _form).one('change', validatedropboxFormats);
  }
  return !resultPolicy && !resultFormats;
}

$(document).ready(function () {
  // Reset validation when a reveal modal opens (Foundation fires this
  // for every [data-reveal] modal, not only the Dropbox one).
  $(document).on('open.fndtn.reveal', '[data-reveal]', function () {
    resetValidationdropboxForm();
    // Automatically select the format if there's only one available.
    if ($('#sendTodropboxForm input[name="formats"]').length === 1) {
      $('#sendTodropboxForm input[name="formats"]').prop('checked', true);
    }
  });
});
</script><div id="googleDriveModal" class="reveal-modal large" data-reveal> <div class="header"> <h1 class="heading_07">Save article to Google Drive</h1> </div> <div class="panel"> <div class="row"> <div class="large-12 columns margin-bottom"> <p> To save this article to your Google Drive account, please select one or more formats and confirm that you agree to abide by our usage policies. If this is the first time you used this feature, you will be asked to authorise Cambridge Core to connect with your Google Drive account. <a href="/core/help">Find out more about saving content to Google Drive</a>. 
</p> </div> </div> <div class="row book-title"> <div class="small-12 columns"> <div class="heading_08"><div class="title">Robotics goes PRISMA</div></div> </div> </div> <div class="row book-details book-overview"> <div class="small-12 columns"> <ul> <li class="volume-issue"></li> <li class="author"> <a href="/core/search?filters%5BauthorTerms%5D=Mario Selvaggio&eventCode=SE-AU" class="more-by-this-author ">Mario Selvaggio</a><a target="_blank" class="orcid-logo " href="https://orcid.org/0000-0002-2460-1914" aria-label="Open the ORCID record for Mario Selvaggio in new tab/window"></a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Rocco Moccia&eventCode=SE-AU" class="more-by-this-author ">Rocco Moccia</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Pierluigi Arpenti&eventCode=SE-AU" class="more-by-this-author ">Pierluigi Arpenti</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Riccardo Caccavale&eventCode=SE-AU" class="more-by-this-author ">Riccardo Caccavale</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Fabio Ruggiero&eventCode=SE-AU" class="more-by-this-author ">Fabio Ruggiero</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Jonathan Cacace&eventCode=SE-AU" class="more-by-this-author ">Jonathan Cacace</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Fanny Ficuciello&eventCode=SE-AU" class="more-by-this-author ">Fanny Ficuciello</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a 
href="/core/search?filters%5BauthorTerms%5D=Alberto Finzi&eventCode=SE-AU" class="more-by-this-author ">Alberto Finzi</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Vincenzo Lippiello&eventCode=SE-AU" class="more-by-this-author ">Vincenzo Lippiello</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator ">, </span><a href="/core/search?filters%5BauthorTerms%5D=Luigi Villani&eventCode=SE-AU" class="more-by-this-author ">Luigi Villani</a> <sup data-affiliation-id="a1" class="">(a1)</sup><span class="separator "> and </span><a href="/core/search?filters%5BauthorTerms%5D=Bruno Siciliano&eventCode=SE-AU" class="more-by-this-author ">Bruno Siciliano</a> <sup data-affiliation-id="a1" class="">(a1)</sup> </li> <li class="meta-info">DOI: <a href="https://doi.org/10.1017/S026357472400033X" target="_blank" class="url">https://doi.org/10.1017/S026357472400033X</a></li> </ul> </div> </div> </div> <div class="row wrapper no-padding-top"> <div class="small-12 columns"> <div class="row margin-top"> <div class="large-12 columns"> <form id="sendTogoogleDriveForm" action="/core/services/aop-cambridge-core/sendto" method="GET"> <div class="row"> <div class="large-12 columns"> <fieldset> <legend>Available formats</legend> <label> <input type="checkbox" name="formats" class="formats" value="pdf" required /> PDF </label> <small class="error formats" style="display:none">Please select a format to save.</small> </fieldset> </div> </div> <div class="row"> <div class="large-12 columns"> <label for="usagePolicygoogleDrive"> <input type="checkbox" name="usagePolicygoogleDrive" id="usagePolicygoogleDrive" value="Usage" required/> By using this service, you agree that you will only keep content for personal use, and will not openly distribute them via Dropbox, Google Drive or other file sharing services <small class="error usagePolicy" style="display:none">Please confirm that you accept the 
terms of use.</small> </label> </div> </div> <input type="hidden" name="suppressTrackingEvent" value="false" /> <input type="hidden" name="service" value="googleDrive" /> <input type="hidden" name="documents" value="5DA1E6B0701411F71E5FFC40F2E53847" /> <input type="hidden" name="finalReturn" value="/core/journals/robotica/article/robotics-goes-prisma/5DA1E6B0701411F71E5FFC40F2E53847" /> <input type="hidden" name="_csrf" value="sRpck057-QH20_Gw4aG4x5CvlfZjo_GbDahE"> </form> <!-- Submit button start --> <div class="row margin-top"> <div class="small-6 large-6 columns"> <button type="button" class="button radius transparent right closeModal">Cancel</button> </div> <div class="small-6 large-6 columns"> <button type="submit" onClick="$('#sendTogoogleDriveForm').submit();" class="button blue radius left"> Save </button> </div> </div> <!-- Submit button end --> </div> </div> </div> </div> <a class="close-reveal-modal">×</a> </div> <script type="text/javascript">
// Client-side validation for the "Save to Google Drive" form
// (#sendTogoogleDriveForm). Each validate* helper returns TRUE when the
// field is in error.
$('#sendTogoogleDriveForm').on('submit', validategoogleDriveForm);
var _formgoogleDrive = $('#sendTogoogleDriveForm');

// Hide all inline error messages for the Google Drive form.
function resetValidationgoogleDriveForm() {
  var _form = _formgoogleDrive;
  _form.find('.error.formats').hide();
  _form.find('.error.usagePolicy').hide();
}

// True when no "Available formats" checkbox is ticked; toggles its error text.
function validategoogleDriveFormats(e) {
  var _form = _formgoogleDrive;
  var errors = false;
  var formats = _form.find('input[name="formats"]:checked');
  if (formats.length < 1) {
    _form.find('.error.formats').show();
    errors = true;
  } else {
    _form.find('.error.formats').hide();
  }
  return errors;
}

// True when the usage-policy checkbox is not ticked; toggles its error text.
function validategoogleDriveUsagePolicy(e) {
  var _form = _formgoogleDrive;
  var usagePolicy = _form.find('input[name="usagePolicygoogleDrive"]');
  var errors = false;
  if (!usagePolicy.is(':checked')) {
    _form.find('.error.usagePolicy').show();
    errors = true;
  } else {
    _form.find('.error.usagePolicy').hide();
  }
  return errors;
}

// Submit handler: run every check, and for each failing field re-validate
// once on its next 'change' event. Returning false blocks submission.
function validategoogleDriveForm(e) {
  var _form = _formgoogleDrive;
  var resultPolicy = validategoogleDriveUsagePolicy();
  var resultFormats = validategoogleDriveFormats();
  if (resultPolicy) {
    $('#usagePolicygoogleDrive', _form).one('change', validategoogleDriveUsagePolicy);
  }
  if (resultFormats) {
    $('.formats', _form).one('change', validategoogleDriveFormats);
  }
  return !resultPolicy && !resultFormats;
}

$(document).ready(function () {
  // Reset validation when a reveal modal opens (Foundation fires this
  // for every [data-reveal] modal, not only the Google Drive one).
  $(document).on('open.fndtn.reveal', '[data-reveal]', function () {
    resetValidationgoogleDriveForm();
    // Automatically select the format if there's only one available.
    if ($('#sendTogoogleDriveForm input[name="formats"]').length === 1) {
      $('#sendTogoogleDriveForm input[name="formats"]').prop('checked', true);
    }
  });
});
</script><div id="post-comments-modal" class="reveal-modal data-reveal medium" data-reveal > <a class="close-reveal-modal">×</a> <section> <div class="title-underline"> <h4 class="heading_03"> <span class="reply-only">Reply to:</span> <span class="reply-only" id="reply-comment-title"></span> <span class="comments-only">Submit a response</span> </h4> <div class="border"></div> </div> <form id="postCommentsForm" autocomplete="off" action="/core/services/aop-cambridge-core/comments/submit" method="POST" data-abide=""> <input type="hidden" name="_csrf" value="sRpck057-QH20_Gw4aG4x5CvlfZjo_GbDahE"> <input type="hidden" name="_pid" id="pid" value=""> <input type="hidden" name="_cid" id="cid" value=""> <div class="row"> <div class="row comments-only"> <div class="small-12 columns"> <label for="title">Title * <input name="title" id="title" type="text" maxlength="250" required="required" /> <small class="error">Please enter a title for your response.</small> </label> </div> </div> <div class="row"> <div class="small-12 columns"> <label class="inline" for="comment">Contents *</label> <span > <a href="#" data-dropdown="comment-tip" aria-expanded="false" aria-controls="comment-tip" class="icon info tooltip-icon info-icon" > <span class="sr-only">Contents help</span> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 37 37" aria-hidden="true" 
focusable="false"> <circle fill="#FFF" stroke="#0072CF" stroke-miterlimit="10" cx="18.5" cy="18.5" r="17.5"/> <path fill="#0072CF" d="M20.4 25.3V15.2h-4.9v2.2h1.1v7.9h-1.1v2.2h6v-2.2h-1.1zm-1.9-11.4c1 0 1.9-1 1.9-2.2 0-1.2-.8-2.2-1.9-2.2s-1.9 1-1.9 2.2c0 1.2.9 2.2 1.9 2.2z"/> </svg> </a> </span> <div id="comment-tip" data-dropdown-content role="dialog" aria-modal="true" aria-hidden="true" tabindex="-1" class="f-dropdown content medium" data-remove-focus="true" aria-label="Contents information"> <div class="close-container"> <a href="#" class="button small transparent-no-border radius tooltip-close-btn" id="tooltip-close-link-comment-tip"> <span class="custom-tooltip-button-remove"> <span class="sr-only">Close Contents help</span> <svg width="20" height="20" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false"> <defs> <path d="M9.986 0C4.479 0 0 4.434 0 9.916c0 5.469 4.465 9.916 9.986 9.916 5.507 0 9.986-4.433 9.986-9.916C19.958 4.434 15.493 0 9.986 0z" id="a-comment-tip"/> <mask id="b-comment-tip" x="0" y="0" width="19.972" height="19.832" fill="#fff"><use xlink:href="#a-comment-tip"/></mask> </defs> <g fill="none" fill-rule="evenodd"> <use stroke="#436FCC" mask="url(#b-comment-tip)" stroke-width="2" xlink:href="#a-comment-tip"/> <path d="M14.778 13.325a.983.983 0 0 1 0 1.385.982.982 0 0 1-.704.28c-.254 0-.507-.098-.704-.28l-3.352-3.329-3.353 3.329a.973.973 0 0 1-.69.28 1.01 1.01 0 0 1-.69-.28.983.983 0 0 1 0-1.385l3.352-3.328-3.352-3.33a.983.983 0 0 1 0-1.384 1 1 0 0 1 1.395 0l3.352 3.329 3.352-3.329a1 1 0 0 1 1.394 0 .983.983 0 0 1 0 1.385l-3.352 3.329 3.352 3.328z" fill="#436FCC"/> </g> </svg> </span> </a> </div> <p id="info-comment-tip" class="text-left">- No HTML tags allowed<br />- Web page URLs will display as text only<br />- Lines and paragraphs break automatically<br />- Attachments, images or tables are not permitted</p> </div> <textarea name="comment" id="comment" rows="5" 
maxlength="60000" required="required"></textarea> <small class="error">Please enter your response.</small> </div> </div> </div> <hr/> <div class="title-underline"> <h4 class="heading_03">Your details</h4> <div class="border"></div> </div> <section id="contributor-section"> <div id="contributor-row_0" class="contributor-row"> <div class="large-6 medium-12 small-12 columns left-col"> <div> <label for="firstname_0">First name * <input name="firstname_0" id="firstname_0" type="text" required="required" maxlength="100" placeholder="Enter your first name" /> <small class="error">Please enter your first name.</small> </label> </div> <div> <label for="lastName_0">Last name * <input name="lastName_0" id="lastName_0" type="text" required="required" maxlength="100" placeholder="Enter your last name" /> <small class="error">Please enter your last name.</small> </label> </div> <div> <label for="emailAddress_0" class="inline-tooltip">Email *</label> <span > <a href="#" data-dropdown="comment-email-tip_0" aria-expanded="false" aria-controls="comment-email-tip_0" class="icon info tooltip-icon info-icon" > <span class="sr-only">Email help</span> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 37 37" aria-hidden="true" focusable="false"> <circle fill="#FFF" stroke="#0072CF" stroke-miterlimit="10" cx="18.5" cy="18.5" r="17.5"/> <path fill="#0072CF" d="M20.4 25.3V15.2h-4.9v2.2h1.1v7.9h-1.1v2.2h6v-2.2h-1.1zm-1.9-11.4c1 0 1.9-1 1.9-2.2 0-1.2-.8-2.2-1.9-2.2s-1.9 1-1.9 2.2c0 1.2.9 2.2 1.9 2.2z"/> </svg> </a> </span> <div id="comment-email-tip_0" data-dropdown-content role="dialog" aria-modal="true" aria-hidden="true" tabindex="-1" class="f-dropdown content medium" data-remove-focus="true" aria-label="Email information"> <div class="close-container"> <a href="#" class="button small transparent-no-border radius tooltip-close-btn" id="tooltip-close-link-comment-email-tip_0"> <span class="custom-tooltip-button-remove"> <span class="sr-only">Close Email help</span> <svg width="20" 
height="20" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false"> <defs> <path d="M9.986 0C4.479 0 0 4.434 0 9.916c0 5.469 4.465 9.916 9.986 9.916 5.507 0 9.986-4.433 9.986-9.916C19.958 4.434 15.493 0 9.986 0z" id="a-comment-email-tip_0"/> <mask id="b-comment-email-tip_0" x="0" y="0" width="19.972" height="19.832" fill="#fff"><use xlink:href="#a-comment-email-tip_0"/></mask> </defs> <g fill="none" fill-rule="evenodd"> <use stroke="#436FCC" mask="url(#b-comment-email-tip_0)" stroke-width="2" xlink:href="#a-comment-email-tip_0"/> <path d="M14.778 13.325a.983.983 0 0 1 0 1.385.982.982 0 0 1-.704.28c-.254 0-.507-.098-.704-.28l-3.352-3.329-3.353 3.329a.973.973 0 0 1-.69.28 1.01 1.01 0 0 1-.69-.28.983.983 0 0 1 0-1.385l3.352-3.328-3.352-3.33a.983.983 0 0 1 0-1.384 1 1 0 0 1 1.395 0l3.352 3.329 3.352-3.329a1 1 0 0 1 1.394 0 .983.983 0 0 1 0 1.385l-3.352 3.329 3.352 3.328z" fill="#436FCC"/> </g> </svg> </span> </a> </div> <p id="info-comment-email-tip_0" class="text-left">Your email address will be used in order to notify you when your comment has been reviewed by the moderator and in case the author(s) of the article or the moderator need to contact you directly.</p> </div> <input name="emailAddress_0" id="emailAddress_0" type="text" required="required" maxlength="100" pattern="email" placeholder="Enter your email" /> <small class="error">Please enter a valid email address.</small> </div> </div> <div class="large-6 medium-12 small-12 columns"> <div class="row"> <label for="occupation_0">Occupation <input name="occupation_0" id="occupation_0" type="text" maxlength="100" placeholder="Enter your role and/or occupation" /> <small class="error">Please enter your occupation.</small> </label> </div> <div class="row"> <label for="organisation_0">Affiliation <input name="organisation_0" id="organisation_0" type="text" maxlength="100" placeholder="Enter your organisation or institution name" /> <small 
class="error">Please enter any affiliation.</small> </label> </div> </div> </div> </section> <div class="contributor-btn"> <input id="add-contributor-btn" type="button" class="blue small button radius add-contributor" value="Add contributor"> </div> <div class="add-contributor-limit-reached"> <hr> <h5 class="heading_05">You have entered the maximum number of contributors</h5> </div> <hr/> <div class="title-underline"> <h4 class="heading_03">Conflicting interests</h4> <div class="border"></div> </div> <div class="large-6 medium-12 small-12 columns"> <p> <span > Do you have any conflicting interests? * <a href="#" data-dropdown="comment-conflict-tip" aria-expanded="false" aria-controls="comment-conflict-tip" class="icon info tooltip-icon info-icon" > <span class="sr-only">Conflicting interests help</span> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 37 37" aria-hidden="true" focusable="false"> <circle fill="#FFF" stroke="#0072CF" stroke-miterlimit="10" cx="18.5" cy="18.5" r="17.5"/> <path fill="#0072CF" d="M20.4 25.3V15.2h-4.9v2.2h1.1v7.9h-1.1v2.2h6v-2.2h-1.1zm-1.9-11.4c1 0 1.9-1 1.9-2.2 0-1.2-.8-2.2-1.9-2.2s-1.9 1-1.9 2.2c0 1.2.9 2.2 1.9 2.2z"/> </svg> </a> </span> <div id="comment-conflict-tip" data-dropdown-content role="dialog" aria-modal="true" aria-hidden="true" tabindex="-1" class="f-dropdown content medium" data-remove-focus="true" aria-label="Conflicting interests information"> <div class="close-container"> <a href="#" class="button small transparent-no-border radius tooltip-close-btn" id="tooltip-close-link-comment-conflict-tip"> <span class="custom-tooltip-button-remove"> <span class="sr-only">Close Conflicting interests help</span> <svg width="20" height="20" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false"> <defs> <path d="M9.986 0C4.479 0 0 4.434 0 9.916c0 5.469 4.465 9.916 9.986 9.916 5.507 0 9.986-4.433 9.986-9.916C19.958 4.434 15.493 0 9.986 0z" 
id="a-comment-conflict-tip"/> <mask id="b-comment-conflict-tip" x="0" y="0" width="19.972" height="19.832" fill="#fff"><use xlink:href="#a-comment-conflict-tip"/></mask> </defs> <g fill="none" fill-rule="evenodd"> <use stroke="#436FCC" mask="url(#b-comment-conflict-tip)" stroke-width="2" xlink:href="#a-comment-conflict-tip"/> <path d="M14.778 13.325a.983.983 0 0 1 0 1.385.982.982 0 0 1-.704.28c-.254 0-.507-.098-.704-.28l-3.352-3.329-3.353 3.329a.973.973 0 0 1-.69.28 1.01 1.01 0 0 1-.69-.28.983.983 0 0 1 0-1.385l3.352-3.328-3.352-3.33a.983.983 0 0 1 0-1.384 1 1 0 0 1 1.395 0l3.352 3.329 3.352-3.329a1 1 0 0 1 1.394 0 .983.983 0 0 1 0 1.385l-3.352 3.329 3.352 3.328z" fill="#436FCC"/> </g> </svg> </span> </a> </div> <p id="info-comment-conflict-tip" class="text-left">Please list any fees and grants from, employment by, consultancy for, shared ownership in or any close relationship with, at any time over the preceding 36 months, any organisation whose interests may be affected by the publication of the response. Please also list any non-financial associations or interests (personal, professional, political, institutional, religious or other) that a reasonable reader would want to know about in relation to the submitted work. 
This pertains to all the authors of the piece, their spouses or partners.</p> </div> </p> <label> <input type="radio" name="conflictInterest" id="frm-conflict-on" value="true"> Yes </label> <label> <input type="radio" name="conflictInterest" id="frm-conflict-off" checked="checked" value="false"> No </label> </div> <div class="large-6 medium-12 small-12 columns comment-more-info"> <label>More information * <textarea name="conflictInfo" id="conflictInfo" rows="3" maxlength="500" placeholder="Enter details of your conflicting interests" disabled="false" required="required"></textarea> <small class="error">Please enter details of the conflict of interest or select 'No'.</small> </label> </div> <hr /> <div class="row margin-top"> <div class="large-12 columns"> <label class="paragraph_05"> <input name="agreeToTerms" type="checkbox" value="yes" id="agreeToTerms" required="required" /> Please tick the box to confirm you agree to our <a href="/core/legal-notices/terms" target="_blank">Terms of use</a>. *<br /><br /> <small class="error">Please accept terms of use.</small> </label> </div> </div> <div class="row margin-top"> <div class="large-12 columns"> <label class="paragraph_05"> <input name="agreePrint" type="checkbox" value="yes" id="agreePrint" required="required" /> Please tick the box to confirm you agree that your name, comment and conflicts of interest (if accepted) will be visible on the website and your comment may be printed in the journal at the Editor’s discretion. 
*<br /><br />
<small class="error">Please confirm you agree that your details will be displayed.</small>
</label>
</div>
</div>
<hr />
<div class="submit-btn">
  <!-- FIX: value was the file's only unquoted attribute (value=Submit); quoted per file convention -->
  <input type="submit" class="blue small button radius" value="Submit">
</div>
</form>
</section>
</div>
<!-- Client-side template for extra contributor rows. The "--x--" token is replaced with the
     row index by the add-contributor script before this markup is injected into the form. -->
<script type="text/template" data-template="contributorsTemplate">
  <div id="contributor-row_--x--" class="contributor-row contributor-row-hide">
    <hr />
    <div class="large-6 medium-12 small-12 columns left-col">
      <div>
        <label for="firstname_--x--">First name *
          <input name="firstname_--x--" id="firstname_--x--" type="text" required="required" maxlength="100" placeholder="Enter contributor first name" />
          <small class="error">Please enter your first name.</small>
        </label>
      </div>
      <div>
        <label for="lastName_--x--">Last name *
          <input name="lastName_--x--" id="lastName_--x--" type="text" required="required" maxlength="100" placeholder="Enter contributor last name" />
          <small class="error">Please enter your last name.</small>
        </label>
      </div>
      <div>
        <label for="emailAddress_--x--" class="inline-tooltip">Email *</label>
        <span>
          <a href="#" data-dropdown="comment-email-tip_--x--" aria-expanded="false" aria-controls="comment-email-tip_--x--" class="icon info tooltip-icon info-icon">
            <span class="sr-only">Email help</span>
            <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 37 37" aria-hidden="true" focusable="false">
              <circle fill="#FFF" stroke="#0072CF" stroke-miterlimit="10" cx="18.5" cy="18.5" r="17.5"/>
              <path fill="#0072CF" d="M20.4 25.3V15.2h-4.9v2.2h1.1v7.9h-1.1v2.2h6v-2.2h-1.1zm-1.9-11.4c1 0 1.9-1 1.9-2.2 0-1.2-.8-2.2-1.9-2.2s-1.9 1-1.9 2.2c0 1.2.9 2.2 1.9 2.2z"/>
            </svg>
          </a>
        </span>
        <div id="comment-email-tip_--x--" data-dropdown-content role="dialog" aria-modal="true" aria-hidden="true" tabindex="-1" class="f-dropdown content medium" data-remove-focus="true" aria-label="Email information">
          <div class="close-container">
            <a href="#" class="button small transparent-no-border radius tooltip-close-btn" id="tooltip-close-link-comment-email-tip_--x--">
              <span class="custom-tooltip-button-remove">
                <span class="sr-only">Close Email help</span>
                <svg width="20" height="20" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false">
                  <defs>
                    <path d="M9.986 0C4.479 0 0 4.434 0 9.916c0 5.469 4.465 9.916 9.986 9.916 5.507 0 9.986-4.433 9.986-9.916C19.958 4.434 15.493 0 9.986 0z" id="a-comment-email-tip_--x--"/>
                    <mask id="b-comment-email-tip_--x--" x="0" y="0" width="19.972" height="19.832" fill="#fff"><use xlink:href="#a-comment-email-tip_--x--"/></mask>
                  </defs>
                  <g fill="none" fill-rule="evenodd">
                    <use stroke="#436FCC" mask="url(#b-comment-email-tip_--x--)" stroke-width="2" xlink:href="#a-comment-email-tip_--x--"/>
                    <path d="M14.778 13.325a.983.983 0 0 1 0 1.385.982.982 0 0 1-.704.28c-.254 0-.507-.098-.704-.28l-3.352-3.329-3.353 3.329a.973.973 0 0 1-.69.28 1.01 1.01 0 0 1-.69-.28.983.983 0 0 1 0-1.385l3.352-3.328-3.352-3.33a.983.983 0 0 1 0-1.384 1 1 0 0 1 1.395 0l3.352 3.329 3.352-3.329a1 1 0 0 1 1.394 0 .983.983 0 0 1 0 1.385l-3.352 3.329 3.352 3.328z" fill="#436FCC"/>
                  </g>
                </svg>
              </span>
            </a>
          </div>
          <p id="info-comment-email-tip_--x--" class="text-left">Your email address will be used in order to notify you when your comment has been reviewed by the moderator and in case the author(s) of the article or the moderator need to contact you directly.</p>
        </div>
        <!-- NOTE(review): pattern="email" is a Foundation Abide named pattern, not a regex — intentional -->
        <input name="emailAddress_--x--" id="emailAddress_--x--" type="text" required="required" maxlength="100" pattern="email" placeholder="Enter contributor email" />
        <small class="error">Please enter a valid email address.</small>
      </div>
    </div>
    <div class="large-6 medium-12 small-12 columns">
      <div class="row">
        <label for="occupation_--x--">Occupation
          <input name="occupation_--x--" id="occupation_--x--" type="text" maxlength="100" placeholder="Enter contributor role and/or occupation" />
          <small class="error">Please enter your occupation.</small>
        </label>
      </div>
      <div class="row">
        <!-- FIX: "for" previously pointed at organisation_0 (the static first row's field), so the
             Affiliation label was associated with the wrong input on every templated contributor row -->
        <label for="organisation_--x--">Affiliation
          <input name="organisation_--x--" id="organisation_--x--" type="text" maxlength="100" placeholder="Enter contributor organisation or institution name" />
          <small class="error">Please enter any affiliation.</small>
        </label>
      </div>
      <input type="button" id="remove-contributor_--x--" class="small button alert radius remove-contributor" value="Remove contributor">
    </div>
  </div>
</script>
<!-- Basket configuration flags read by the shared basket script -->
<script>
  window.AOP.basket.currency = 'USD';
  window.AOP.basket.isEnabled = true;
</script>
<script>
  /* Style checkboxes and radio inputs. */

  /**
   * Wraps checkbox/radio inputs for custom styling: adds the "styled" class and
   * inserts an empty <span> (the visual control) directly after each input.
   * Inputs already carrying "styled", or opted out via "no-style", are skipped.
   *
   * @param {jQuery|Element} [container] - optional scope to search within;
   *   when omitted the whole document is scanned.
   */
  AOP.styleInputElements = function (container) {
    var inputs = container
      ? $('input[type=checkbox], input[type=radio]', container)
      : $('input[type=checkbox], input[type=radio]');

    inputs.not('.styled,.no-style').each(function () {
      var $input = $(this);
      $input.addClass('styled');
      // Guard against inserting the styling <span> twice: skip when the next
      // element sibling is already exactly an empty <span>.
      var next = this.nextElementSibling;
      if (!(next && next.outerHTML === '<span></span>')) {
        $input.after($('<span></span>'));
      }
    });
  };

  $(document).ready(function () {
    AOP.styleInputElements();
  });
</script>