CINXE.COM
Emerging Technologies
<!DOCTYPE html> <html lang="en"> <head> <title>Emerging Technologies </title> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="apple-touch-icon" sizes="180x180" href="/static/browse/0.3.4/images/icons/apple-touch-icon.png"> <link rel="icon" type="image/png" sizes="32x32" href="/static/browse/0.3.4/images/icons/favicon-32x32.png"> <link rel="icon" type="image/png" sizes="16x16" href="/static/browse/0.3.4/images/icons/favicon-16x16.png"> <link rel="manifest" href="/static/browse/0.3.4/images/icons/site.webmanifest"> <link rel="mask-icon" href="/static/browse/0.3.4/images/icons/safari-pinned-tab.svg" color="#5bbad5"> <meta name="msapplication-TileColor" content="#da532c"> <meta name="theme-color" content="#ffffff"> <link rel="stylesheet" type="text/css" media="screen" href="/static/browse/0.3.4/css/arXiv.css?v=20241206" /> <link rel="stylesheet" type="text/css" media="print" href="/static/browse/0.3.4/css/arXiv-print.css?v=20200611" /> <link rel="stylesheet" type="text/css" media="screen" href="/static/browse/0.3.4/css/browse_search.css" /> <script language="javascript" src="/static/browse/0.3.4/js/accordion.js" /></script> <script src="/static/browse/0.3.4/js/mathjaxToggle.min.js" type="text/javascript"></script> <script type="text/javascript" language="javascript">mathjaxToggle();</script> </head> <body class="with-cu-identity"> <div class="flex-wrap-footer"> <header> <a href="#content" class="is-sr-only">Skip to main content</a> <!-- start desktop header --> <div class="columns is-vcentered is-hidden-mobile" id="cu-identity"> <div class="column" id="cu-logo"> <a href="https://www.cornell.edu/"><img src="/static/browse/0.3.4/images/icons/cu/cornell-reduced-white-SMALL.svg" alt="Cornell University" /></a> </div><div class="column" id="support-ack"> <span id="support-ack-url">We gratefully acknowledge support from the Simons Foundation, <a href="https://info.arxiv.org/about/ourmembers.html">member institutions</a>, and all 
contributors.</span> <a href="https://info.arxiv.org/about/donate.html" class="btn-header-donate">Donate</a> </div> </div> <div id="header" class="is-hidden-mobile"> <a aria-hidden="true" tabindex="-1" href="/IgnoreMe"></a> <div class="header-breadcrumbs"> <a href="/"><img src="/static/browse/0.3.4/images/arxiv-logo-one-color-white.svg" alt="arxiv logo" style="height:40px;"/></a> <span>></span> <a href="/list/cs.ET/recent">cs.ET</a> </div> <div class="search-block level-right"> <form class="level-item mini-search" method="GET" action="https://arxiv.org/search"> <div class="field has-addons"> <div class="control"> <input class="input is-small" type="text" name="query" placeholder="Search..." aria-label="Search term or terms" /> <p class="help"><a href="https://info.arxiv.org/help">Help</a> | <a href="https://arxiv.org/search/advanced">Advanced Search</a></p> </div> <div class="control"> <div class="select is-small"> <select name="searchtype" aria-label="Field to search"> <option value="all" selected="selected">All fields</option> <option value="title">Title</option> <option value="author">Author</option> <option value="abstract">Abstract</option> <option value="comments">Comments</option> <option value="journal_ref">Journal reference</option> <option value="acm_class">ACM classification</option> <option value="msc_class">MSC classification</option> <option value="report_num">Report number</option> <option value="paper_id">arXiv identifier</option> <option value="doi">DOI</option> <option value="orcid">ORCID</option> <option value="author_id">arXiv author ID</option> <option value="help">Help pages</option> <option value="full_text">Full text</option> </select> </div> </div> <input type="hidden" name="source" value="header"> <button class="button is-small is-cul-darker">Search</button> </div> </form> </div> </div><!-- /end desktop header --> <div class="mobile-header"> <div class="columns is-mobile"> <div class="column logo-arxiv"><a href="https://arxiv.org/"><img 
src="/static/browse/0.3.4/images/arxiv-logomark-small-white.svg" alt="arXiv logo" style="height:60px;" /></a></div> <div class="column logo-cornell"><a href="https://www.cornell.edu/"> <picture> <source media="(min-width: 501px)" srcset="/static/browse/0.3.4/images/icons/cu/cornell-reduced-white-SMALL.svg 400w" sizes="400w" /> <source srcset="/static/browse/0.3.4/images/icons/cu/cornell_seal_simple_black.svg 2x" /> <img src="/static/browse/0.3.4/images/icons/cu/cornell-reduced-white-SMALL.svg" alt="Cornell University Logo" /> </picture> </a></div> <div class="column nav" id="toggle-container" role="menubar"> <button class="toggle-control"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" class="icon filter-white"><title>open search</title><path d="M505 442.7L405.3 343c-4.5-4.5-10.6-7-17-7H372c27.6-35.3 44-79.7 44-128C416 93.1 322.9 0 208 0S0 93.1 0 208s93.1 208 208 208c48.3 0 92.7-16.4 128-44v16.3c0 6.4 2.5 12.5 7 17l99.7 99.7c9.4 9.4 24.6 9.4 33.9 0l28.3-28.3c9.4-9.4 9.4-24.6.1-34zM208 336c-70.7 0-128-57.2-128-128 0-70.7 57.2-128 128-128 70.7 0 128 57.2 128 128 0 70.7-57.2 128-128 128z"/></svg></button> <div class="mobile-toggle-block toggle-target"> <form class="mobile-search-form" method="GET" action="https://arxiv.org/search"> <div class="field has-addons"> <input class="input" type="text" name="query" placeholder="Search..." 
aria-label="Search term or terms" /> <input type="hidden" name="source" value="header"> <input type="hidden" name="searchtype" value="all"> <button class="button">GO</button> </div> </form> </div> <button class="toggle-control"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512" class="icon filter-white" role="menu"><title>open navigation menu</title><path d="M16 132h416c8.837 0 16-7.163 16-16V76c0-8.837-7.163-16-16-16H16C7.163 60 0 67.163 0 76v40c0 8.837 7.163 16 16 16zm0 160h416c8.837 0 16-7.163 16-16v-40c0-8.837-7.163-16-16-16H16c-8.837 0-16 7.163-16 16v40c0 8.837 7.163 16 16 16zm0 160h416c8.837 0 16-7.163 16-16v-40c0-8.837-7.163-16-16-16H16c-8.837 0-16 7.163-16 16v40c0 8.837 7.163 16 16 16z"/ ></svg></button> <div class="mobile-toggle-block toggle-target"> <nav class="mobile-menu" aria-labelledby="mobilemenulabel"> <h2 id="mobilemenulabel">quick links</h2> <ul> <li><a href="https://arxiv.org/login">Login</a></li> <li><a href="https://info.arxiv.org/help">Help Pages</a></li> <li><a href="https://info.arxiv.org/about">About</a></li> </ul> </nav> </div> </div> </div> </div><!-- /end mobile-header --> </header> <main> <div id="content"> <div id='content-inner'> <div id='dlpage'> <h1>Emerging Technologies</h1> <ul> <li><a href="#item0">New submissions</a></li> <li><a href="#item5">Cross-lists</a></li> <li><a href="#item9">Replacements</a></li> </ul> <p>See <a id="recent-cs.ET" aria-labelledby="recent-cs.ET" href="/list/cs.ET/recent">recent</a> articles</p> <h3>Showing new listings for Wednesday, 19 March 2025</h3> <div class='paging'>Total of 9 entries </div> <div class='morefewer'>Showing up to 2000 entries per page: <a href=/list/cs.ET/new?skip=0&show=1000 rel="nofollow"> fewer</a> | <span style="color: #454545">more</span> | <span style="color: #454545">all</span> </div> <dl id='articles'> <h3>New submissions (showing 4 of 4 entries)</h3> <dt> <a name='item1'>[1]</a> <a href ="/abs/2503.13809" title="Abstract" id="2503.13809"> arXiv:2503.13809 </a> [<a 
href="/pdf/2503.13809" title="Download PDF" id="pdf-2503.13809" aria-labelledby="pdf-2503.13809">pdf</a>, <a href="https://arxiv.org/html/2503.13809v1" title="View HTML" id="html-2503.13809" aria-labelledby="html-2503.13809" rel="noopener noreferrer" target="_blank">html</a>, <a href="/format/2503.13809" title="Other formats" id="oth-2503.13809" aria-labelledby="oth-2503.13809">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> The Immersive Archive: Archival Strategies for the Sensorama & Sutherland HMD </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Abes,+Z">Zeynep Abes</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Fairchild,+N">Nathan Fairchild</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Lin,+S">Spencer Lin</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Wahba,+M">Michael Wahba</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Xiao,+K">Katrina Xiao</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Fisher,+S+S">Scott S. Fisher</a></div> <div class='list-journal-ref'><span class='descriptor'>Journal-ref:</span> Proc. IEEE Conf. AI & XR, 2025, pp. 307-312 </div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Emerging Technologies (cs.ET)</span>; Multimedia (cs.MM) </div> <p class='mathjax'> The Immersive Archive is an initiative dedicated to preserve and restore the groundbreaking works from across Extended Reality (XR) history. Originating at the University of Southern California's Mobile and Environmental Media Lab, this archive is committed to developing and exhibiting simulations of influential XR devices that have shaped immersive media over time. 
This paper examines the challenges and strategies involved in archiving seminal XR technologies, with a focus on Morton Heilig's Sensorama and Ivan Sutherland's Head-Mounted Display. As pioneering prototypes in virtual and augmented reality, these devices provide valuable insights into the evolution of immersive media, highlighting both technological innovation and sensory experimentation. Through collaborative archival efforts with institutions such as the HMH Moving Image Archive at the University of Southern California and the Computer History Museum, this research integrates media archaeology with digital preservation techniques. Emphasis is placed on documentation practices, the restoration of physical artifacts, and the development of simulations of these historic experiences for contemporary virtual reality platforms. Our interdisciplinary approach to archival methodologies, which captures the multisensory and interactive qualities of these pioneering devices, has been instrumental in developing a framework for future immersive media preservation initiatives. By preserving the immersive essence of these early experiences, we lay the groundwork for future generations to explore and learn from the origins of immersive media. Safeguarding this rich legacy is essential to ensure these visionary works continue to inspire and shape the future of media landscapes. 
</p> </div> </dd> <dt> <a name='item2'>[2]</a> <a href ="/abs/2503.13819" title="Abstract" id="2503.13819"> arXiv:2503.13819 </a> [<a href="/pdf/2503.13819" title="Download PDF" id="pdf-2503.13819" aria-labelledby="pdf-2503.13819">pdf</a>, <a href="https://arxiv.org/html/2503.13819v1" title="View HTML" id="html-2503.13819" aria-labelledby="html-2503.13819" rel="noopener noreferrer" target="_blank">html</a>, <a href="/format/2503.13819" title="Other formats" id="oth-2503.13819" aria-labelledby="oth-2503.13819">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> LLM-Empowered IoT for 6G Networks: Architecture, Challenges, and Solutions </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Chen,+X">Xiaopei Chen</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Wu,+W">Wen Wu</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Li,+Z">Zuguang Li</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Li,+L">Liang Li</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Ji,+F">Fei Ji</a></div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Emerging Technologies (cs.ET)</span> </div> <p class='mathjax'> The Internet of Things (IoT) in the sixth generation (6G) era is envisioned to evolve towards intelligence, ubiquity, and self-optimization. Large language models (LLMs) have demonstrated remarkable generalization capabilities across diverse domains, including natural language processing (NLP), computer vision (CV), and beyond. In this article, we propose an LLM-empowered IoT architecture for 6G networks to achieve intelligent autonomy while supporting advanced IoT applications. LLMs are pushed to the edge of the 6G network to support the synergy of LLMs and IoT. LLM solutions are tailored to both IoT application requirements and IoT management needs, i.e., LLM for IoT. 
On the other hand, edge inference and edge fine-tuning are discussed to support the deployment of LLMs, i.e., LLM on IoT. Furthermore, we propose a memory-efficient split federated learning (SFL) framework for LLM fine-tuning on heterogeneous IoT devices that alleviates memory pressures on both IoT devices and the edge server while achieving comparable performance and convergence time. Finally, a case study is presented, followed by a discussion about open issues of LLM-empowered IoT for 6G networks. </p> </div> </dd> <dt> <a name='item3'>[3]</a> <a href ="/abs/2503.14126" title="Abstract" id="2503.14126"> arXiv:2503.14126 </a> [<a href="/pdf/2503.14126" title="Download PDF" id="pdf-2503.14126" aria-labelledby="pdf-2503.14126">pdf</a>, <a href="https://arxiv.org/html/2503.14126v1" title="View HTML" id="html-2503.14126" aria-labelledby="html-2503.14126" rel="noopener noreferrer" target="_blank">html</a>, <a href="/format/2503.14126" title="Other formats" id="oth-2503.14126" aria-labelledby="oth-2503.14126">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> Hardware Implementation of Ring Oscillator Networks Coupled by BEOL Integrated ReRAM for Associative Memory Tasks </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Choi,+W">Wooseok Choi</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=van+Bodegraven,+T">Thomas van Bodegraven</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Verest,+J">Jelle Verest</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Maher,+O">Olivier Maher</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Falcone,+D+F">Donato Francesco Falcone</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=La+Porta,+A">Antonio La Porta</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Jubin,+D">Daniel Jubin</a>, <a 
href="https://arxiv.org/search/cs?searchtype=author&query=Offrein,+B+J">Bert Jan Offrein</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Karg,+S">Siegfried Karg</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Bragaglia,+V">Valeria Bragaglia</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Todri-Sanial,+A">Aida Todri-Sanial</a></div> <div class='list-comments mathjax'><span class='descriptor'>Comments:</span> Accepted in IEEE IMW 2025 </div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Emerging Technologies (cs.ET)</span> </div> <p class='mathjax'> We demonstrate the first hardware implementation of an oscillatory neural network (ONN) utilizing resistive memory (ReRAM) for coupling elements. A ReRAM crossbar array chip, integrated into the Back End of Line (BEOL) of CMOS technology, is leveraged to establish dense coupling elements between oscillator neurons, allowing phase-encoded analog information to be processed in-memory. We also realize an ONN architecture design with the coupling ReRAM array. To validate the architecture experimentally, we present a conductive metal oxide (CMO)/HfOx ReRAM array chip integrated with a 2-by-2 ring oscillator-based network. The system successfully retrieves patterns through correct binary phase locking. This proof of concept underscores the potential of ReRAM technology for large-scale, integrated ONNs. 
</p> </div> </dd> <dt> <a name='item4'>[4]</a> <a href ="/abs/2503.14186" title="Abstract" id="2503.14186"> arXiv:2503.14186 </a> [<a href="/pdf/2503.14186" title="Download PDF" id="pdf-2503.14186" aria-labelledby="pdf-2503.14186">pdf</a>, <a href="https://arxiv.org/html/2503.14186v1" title="View HTML" id="html-2503.14186" aria-labelledby="html-2503.14186" rel="noopener noreferrer" target="_blank">html</a>, <a href="/format/2503.14186" title="Other formats" id="oth-2503.14186" aria-labelledby="oth-2503.14186">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> 5G-Enabled Teleoperated Driving: An Experimental Evaluation </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Testouri,+M">Mehdi Testouri</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Elghazaly,+G">Gamal Elghazaly</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Hawlader,+F">Faisal Hawlader</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Frank,+R">Raphael Frank</a></div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Emerging Technologies (cs.ET)</span>; Networking and Internet Architecture (cs.NI) </div> <p class='mathjax'> Teleoperated driving enables remote human intervention in autonomous vehicles, addressing challenges in complex driving environments. However, its effectiveness depends on ultra-low latency, high-reliability communication. This paper evaluates teleoperated driving over 5G networks, analyzing key performance metrics such as glass-to-glass (G2G) latency, RTT and steering command delay. Using a real-world testbed with a Kia Soul EV and a remote teleoperation platform, we assess the feasibility and limitations of 5G-enabled teleoperated driving. Our system achieved an average G2G latency of 202ms and an RTT of 47ms highlighting the G2G latency as the critical bottleneck. 
The steering control proved to be mostly accurate and responsive. Finally, this paper provides recommendations and outlines future work to improve future teleoperated driving deployments for safer and more reliable autonomous mobility. </p> </div> </dd> </dl> <dl id='articles'> <h3>Cross submissions (showing 4 of 4 entries)</h3> <dt> <a name='item5'>[5]</a> <a href ="/abs/2503.14062" title="Abstract" id="2503.14062"> arXiv:2503.14062 </a> (cross-list from quant-ph) [<a href="/pdf/2503.14062" title="Download PDF" id="pdf-2503.14062" aria-labelledby="pdf-2503.14062">pdf</a>, <a href="/format/2503.14062" title="Other formats" id="oth-2503.14062" aria-labelledby="oth-2503.14062">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> Data Encoding for VQC in Qiskit, A Comparison With Novel Hybrid Encoding </div> <div class='list-authors'><a href="https://arxiv.org/search/quant-ph?searchtype=author&query=Biswas,+H">Hillol Biswas</a></div> <div class='list-comments mathjax'><span class='descriptor'>Comments:</span> 13 pdf pages in current format </div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Quantum Physics (quant-ph)</span>; Emerging Technologies (cs.ET) </div> <p class='mathjax'> If quantum machine learning emulates the ways of classical machine learning, data encoding in a quantum neural network is imperative for many reasons. One of the key ones is the complexity attributed to the data size depending upon the features and types, which is the essence of machine learning. While the standard various encoding techniques exist for quantum computing, hybrid one is not among many, though it tends to offer some distinct advantages, viz. efficient qubits utilization and increased entanglement, which fits well for variation quantum classifier algorithm by manipulating the essential criteria of ZZFeatureMaps and RealAmplitudes. 
While Amplitude encoding can turn traits normalized into quantum amplitudes, encoding an angle by using Ry gates to encode feature values into rotation angles, and phase encoding by using Rz gates to encode extra feature information as phase is plausible to combine all together. By combining these three methods, this paper demonstrates that efficient qubit usage is ensured as Amplitude encoding reduces the required qubits, Angle encoding makes state freedom better and is used for expressive encoding, and Phase-based distinction. Finally, using classical optimizers, the hybrid encoding technique through VQC is fit in training and testing using a synthetic dataset, and results have been compared to the standard VQC encoding in qiskit machine learning ecosystems. </p> </div> </dd> <dt> <a name='item6'>[6]</a> <a href ="/abs/2503.14102" title="Abstract" id="2503.14102"> arXiv:2503.14102 </a> (cross-list from cs.HC) [<a href="/pdf/2503.14102" title="Download PDF" id="pdf-2503.14102" aria-labelledby="pdf-2503.14102">pdf</a>, <a href="/format/2503.14102" title="Other formats" id="oth-2503.14102" aria-labelledby="oth-2503.14102">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> Sensory-driven microinterventions for improved health and wellbeing </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Abdalla,+Y">Youssef Abdalla</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Gatti,+E">Elia Gatti</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Orlu,+M">Mine Orlu</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Obrist,+M">Marianna Obrist</a></div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Human-Computer Interaction (cs.HC)</span>; Artificial Intelligence (cs.AI); Computers and Society (cs.CY); Emerging Technologies (cs.ET) </div> <p class='mathjax'> The five senses are 
gateways to our wellbeing and their decline is considered a significant public health challenge that is linked to multiple conditions that contribute significantly to morbidity and mortality. Modern technology, with its ubiquitous nature and fast data processing, has the ability to leverage the power of the senses to transform our approach to day-to-day healthcare, with positive effects on our quality of life. Here, we introduce the idea of sensory-driven microinterventions for preventative, personalised healthcare. Microinterventions are targeted, timely, minimally invasive strategies that seamlessly integrate into our daily life. This idea harnesses humans' sensory capabilities, leverages technological advances in sensory stimulation and real-time processing ability for sensing the senses. The collection of sensory data from our continuous interaction with technology - for example the tone of voice, gait movement, smart home behaviour - opens up a shift towards personalised technology-enabled, sensory-focused healthcare interventions, coupled with the potential of early detection and timely treatment of sensory deficits that can signal critical health insights, especially for neurodegenerative diseases such as Parkinson's disease. 
</p> </div> </dd> <dt> <a name='item7'>[7]</a> <a href ="/abs/2503.14354" title="Abstract" id="2503.14354"> arXiv:2503.14354 </a> (cross-list from cs.AR) [<a href="/pdf/2503.14354" title="Download PDF" id="pdf-2503.14354" aria-labelledby="pdf-2503.14354">pdf</a>, <a href="/format/2503.14354" title="Other formats" id="oth-2503.14354" aria-labelledby="oth-2503.14354">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> Retrospective: A CORDIC Based Configurable Activation Function for NN Applications </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Kokane,+O">Omkar Kokane</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Raut,+G">Gopal Raut</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Ullah,+S">Salim Ullah</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Lokhande,+M">Mukul Lokhande</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Teman,+A">Adam Teman</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Kumar,+A">Akash Kumar</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Vishvakarma,+S+K">Santosh Kumar Vishvakarma</a></div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Hardware Architecture (cs.AR)</span>; Artificial Intelligence (cs.AI); Computer Vision and Pattern Recognition (cs.CV); Emerging Technologies (cs.ET); Image and Video Processing (eess.IV) </div> <p class='mathjax'> A CORDIC-based configuration for the design of Activation Functions (AF) was previously suggested to accelerate ASIC hardware design for resource-constrained systems by providing functional reconfigurability. Since its introduction, this new approach for neural network acceleration has gained widespread popularity, influencing numerous designs for activation functions in both academic and commercial AI processors. 
In this retrospective analysis, we explore the foundational aspects of this initiative, summarize key developments over recent years, and introduce the DA-VINCI AF tailored for the evolving needs of AI applications. This new generation of dynamically configurable and precision-adjustable activation function cores promise greater adaptability for a range of activation functions in AI workloads, including Swish, SoftMax, SeLU, and GeLU, utilizing the Shift-and-Add CORDIC technique. The previously presented design has been optimized for MAC, Sigmoid, and Tanh functionalities and incorporated into ReLU AFs, culminating in an accumulative NEURIC compute unit. These enhancements position NEURIC as a fundamental component in the resource-efficient vector engine for the realization of AI accelerators that focus on DNNs, RNNs/LSTMs, and Transformers, achieving a quality of results (QoR) of 98.5%. </p> </div> </dd> <dt> <a name='item8'>[8]</a> <a href ="/abs/2503.14473" title="Abstract" id="2503.14473"> arXiv:2503.14473 </a> (cross-list from quant-ph) [<a href="/pdf/2503.14473" title="Download PDF" id="pdf-2503.14473" aria-labelledby="pdf-2503.14473">pdf</a>, <a href="https://arxiv.org/html/2503.14473v1" title="View HTML" id="html-2503.14473" aria-labelledby="html-2503.14473" rel="noopener noreferrer" target="_blank">html</a>, <a href="/format/2503.14473" title="Other formats" id="oth-2503.14473" aria-labelledby="oth-2503.14473">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> EnQode: Fast Amplitude Embedding for Quantum Machine Learning Using Classical Data </div> <div class='list-authors'><a href="https://arxiv.org/search/quant-ph?searchtype=author&query=Han,+J">Jason Han</a>, <a href="https://arxiv.org/search/quant-ph?searchtype=author&query=DiBrita,+N+S">Nicholas S. 
DiBrita</a>, <a href="https://arxiv.org/search/quant-ph?searchtype=author&query=Cho,+Y">Younghyun Cho</a>, <a href="https://arxiv.org/search/quant-ph?searchtype=author&query=Luo,+H">Hengrui Luo</a>, <a href="https://arxiv.org/search/quant-ph?searchtype=author&query=Patel,+T">Tirthak Patel</a></div> <div class='list-comments mathjax'><span class='descriptor'>Comments:</span> EnQode will appear in the Proceedings of the Design Automation Conference (DAC), 2025 </div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Quantum Physics (quant-ph)</span>; Emerging Technologies (cs.ET); Machine Learning (cs.LG) </div> <p class='mathjax'> Amplitude embedding (AE) is essential in quantum machine learning (QML) for encoding classical data onto quantum circuits. However, conventional AE methods suffer from deep, variable-length circuits that introduce high output error due to extensive gate usage and variable error rates across samples, resulting in noise-driven inconsistencies that degrade model accuracy. We introduce EnQode, a fast AE technique based on symbolic representation that addresses these limitations by clustering dataset samples and solving for cluster mean states through a low-depth, machine-specific ansatz. Optimized to reduce physical gates and SWAP operations, EnQode ensures all samples face consistent, low noise levels by standardizing circuit depth and composition. With over 90% fidelity in data mapping, EnQode enables robust, high-performance QML on noisy intermediate-scale quantum (NISQ) devices. Our open-source solution provides a scalable and efficient alternative for integrating classical data with quantum models. 
</p> </div> </dd> </dl> <dl id='articles'> <h3>Replacement submissions (showing 1 of 1 entries)</h3> <dt> <a name='item9'>[9]</a> <a href ="/abs/2412.04908" title="Abstract" id="2412.04908"> arXiv:2412.04908 </a> (replaced) [<a href="/pdf/2412.04908" title="Download PDF" id="pdf-2412.04908" aria-labelledby="pdf-2412.04908">pdf</a>, <a href="https://arxiv.org/html/2412.04908v2" title="View HTML" id="html-2412.04908" aria-labelledby="html-2412.04908" rel="noopener noreferrer" target="_blank">html</a>, <a href="/format/2412.04908" title="Other formats" id="oth-2412.04908" aria-labelledby="oth-2412.04908">other</a>] </dt> <dd> <div class='meta'> <div class='list-title mathjax'><span class='descriptor'>Title:</span> MERCI: Multimodal Emotional and peRsonal Conversational Interactions Dataset </div> <div class='list-authors'><a href="https://arxiv.org/search/cs?searchtype=author&query=Althubyani,+M">Mohammed Althubyani</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Meng,+Z">Zhijin Meng</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Xie,+S">Shengyuan Xie</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Seung,+C">Cha Seung</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Razzak,+I">Imran Razzak</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Sandoval,+E+B">Eduardo B. 
Sandoval</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Kocaballi,+B">Baki Kocaballi</a>, <a href="https://arxiv.org/search/cs?searchtype=author&query=Cruz,+F">Francisco Cruz</a></div> <div class='list-comments mathjax'><span class='descriptor'>Comments:</span> 9 pages, 5 Figures, Rejected from International Conference of Human Robot Interaction 2025, Melbourne, Australia </div> <div class='list-subjects'><span class='descriptor'>Subjects:</span> <span class="primary-subject">Human-Computer Interaction (cs.HC)</span>; Emerging Technologies (cs.ET); Robotics (cs.RO) </div> <p class='mathjax'> The integration of conversational agents into our daily lives has become increasingly common, yet many of these agents cannot engage in deep interactions with humans. Despite this, there is a noticeable shortage of datasets that capture multimodal information from human-robot interaction dialogues. To address this gap, we have recorded a novel multimodal dataset (MERCI) that encompasses rich embodied interaction data. The process involved asking participants to complete a questionnaire and gathering their profiles on ten topics, such as hobbies and favorite music. Subsequently, we initiated conversations between the robot and the participants, leveraging GPT-4 to generate contextually appropriate responses based on the participant's profile and emotional state, as determined by facial expression recognition and sentiment analysis. Automatic and user evaluations were conducted to assess the overall quality of the collected data. The results of both evaluations indicated a high level of naturalness, engagement, fluency, consistency, and relevance in the conversation, as well as the robot's ability to provide empathetic responses. It is worth noting that the dataset is derived from genuine interactions with the robot, involving participants who provided personal information and conveyed actual emotions. 
</p> </div> </dd> </dl> <div class='paging'>Total of 9 entries </div> <div class='morefewer'>Showing up to 2000 entries per page: <a href="/list/cs.ET/new?skip=0&show=1000" rel="nofollow"> fewer</a> | <span style="color: #454545">more</span> | <span style="color: #454545">all</span> </div> </div> </div> </div> </main> <footer style="clear: both;"> <div class="columns is-desktop" role="navigation" aria-label="Secondary" style="margin: -0.75em -0.75em 0.75em -0.75em"> <!-- Macro-Column 1 --> <div class="column" style="padding: 0;"> <div class="columns"> <div class="column"> <ul style="list-style: none; line-height: 2;"> <li><a href="https://info.arxiv.org/about">About</a></li> <li><a href="https://info.arxiv.org/help">Help</a></li> </ul> </div> <div class="column"> <ul style="list-style: none; line-height: 2;"> <li> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" class="icon filter-black" role="presentation"><title>contact arXiv</title><desc>Click here to contact arXiv</desc><path d="M502.3 190.8c3.9-3.1 9.7-.2 9.7 4.7V400c0 26.5-21.5 48-48 48H48c-26.5 0-48-21.5-48-48V195.6c0-5 5.7-7.8 9.7-4.7 22.4 17.4 52.1 39.5 154.1 113.6 21.1 15.4 56.7 47.8 92.2 47.6 35.7.3 72-32.8 92.3-47.6 102-74.1 131.6-96.3 154-113.7zM256 320c23.2.4 56.6-29.2 73.4-41.4 132.7-96.3 142.8-104.7 173.4-128.7 5.8-4.5 9.2-11.5 9.2-18.9v-19c0-26.5-21.5-48-48-48H48C21.5 64 0 85.5 0 112v19c0 7.4 3.4 14.3 9.2 18.9 30.6 23.9 40.7 32.4 173.4 128.7 16.8 12.2 50.2 41.8 73.4 41.4z"/></svg> <a href="https://info.arxiv.org/help/contact.html"> Contact</a> </li> <li> <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" class="icon filter-black" role="presentation"><title>subscribe to arXiv mailings</title><desc>Click here to subscribe</desc><path d="M476 3.2L12.5 270.6c-18.1 10.4-15.8 35.6 2.2 43.2L121 358.4l287.3-253.2c5.5-4.9 13.3 2.6 8.6 8.3L176 407v80.5c0 23.6 28.5 32.9 42.5 15.8L282 426l124.6 52.2c14.2 6 30.4-2.9 33-18.2l72-432C515 7.8 493.3-6.8 476 3.2z"/></svg> <a 
href="https://info.arxiv.org/help/subscribe"> Subscribe</a> </li> </ul> </div> </div> </div> <!-- End Macro-Column 1 --> <!-- Macro-Column 2 --> <div class="column" style="padding: 0;"> <div class="columns"> <div class="column"> <ul style="list-style: none; line-height: 2;"> <li><a href="https://info.arxiv.org/help/license/index.html">Copyright</a></li> <li><a href="https://info.arxiv.org/help/policies/privacy_policy.html">Privacy Policy</a></li> </ul> </div> <div class="column sorry-app-links"> <ul style="list-style: none; line-height: 2;"> <li><a href="https://info.arxiv.org/help/web_accessibility.html">Web Accessibility Assistance</a></li> <li> <p class="help"> <a class="a11y-main-link" href="https://status.arxiv.org" target="_blank">arXiv Operational Status <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 256 512" class="icon filter-dark_grey" role="presentation"><path d="M224.3 273l-136 136c-9.4 9.4-24.6 9.4-33.9 0l-22.6-22.6c-9.4-9.4-9.4-24.6 0-33.9l96.4-96.4-96.4-96.4c-9.4-9.4-9.4-24.6 0-33.9L54.3 103c9.4-9.4 24.6-9.4 33.9 0l136 136c9.5 9.4 9.5 24.6.1 34z"/></svg></a><br> Get status notifications via <a class="is-link" href="https://subscribe.sorryapp.com/24846f03/email/new" target="_blank"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" class="icon filter-black" role="presentation"><path d="M502.3 190.8c3.9-3.1 9.7-.2 9.7 4.7V400c0 26.5-21.5 48-48 48H48c-26.5 0-48-21.5-48-48V195.6c0-5 5.7-7.8 9.7-4.7 22.4 17.4 52.1 39.5 154.1 113.6 21.1 15.4 56.7 47.8 92.2 47.6 35.7.3 72-32.8 92.3-47.6 102-74.1 131.6-96.3 154-113.7zM256 320c23.2.4 56.6-29.2 73.4-41.4 132.7-96.3 142.8-104.7 173.4-128.7 5.8-4.5 9.2-11.5 9.2-18.9v-19c0-26.5-21.5-48-48-48H48C21.5 64 0 85.5 0 112v19c0 7.4 3.4 14.3 9.2 18.9 30.6 23.9 40.7 32.4 173.4 128.7 16.8 12.2 50.2 41.8 73.4 41.4z"/></svg>email</a> or <a class="is-link" href="https://subscribe.sorryapp.com/24846f03/slack/new" target="_blank"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512" class="icon 
filter-black" role="presentation"><path d="M94.12 315.1c0 25.9-21.16 47.06-47.06 47.06S0 341 0 315.1c0-25.9 21.16-47.06 47.06-47.06h47.06v47.06zm23.72 0c0-25.9 21.16-47.06 47.06-47.06s47.06 21.16 47.06 47.06v117.84c0 25.9-21.16 47.06-47.06 47.06s-47.06-21.16-47.06-47.06V315.1zm47.06-188.98c-25.9 0-47.06-21.16-47.06-47.06S139 32 164.9 32s47.06 21.16 47.06 47.06v47.06H164.9zm0 23.72c25.9 0 47.06 21.16 47.06 47.06s-21.16 47.06-47.06 47.06H47.06C21.16 243.96 0 222.8 0 196.9s21.16-47.06 47.06-47.06H164.9zm188.98 47.06c0-25.9 21.16-47.06 47.06-47.06 25.9 0 47.06 21.16 47.06 47.06s-21.16 47.06-47.06 47.06h-47.06V196.9zm-23.72 0c0 25.9-21.16 47.06-47.06 47.06-25.9 0-47.06-21.16-47.06-47.06V79.06c0-25.9 21.16-47.06 47.06-47.06 25.9 0 47.06 21.16 47.06 47.06V196.9zM283.1 385.88c25.9 0 47.06 21.16 47.06 47.06 0 25.9-21.16 47.06-47.06 47.06-25.9 0-47.06-21.16-47.06-47.06v-47.06h47.06zm0-23.72c-25.9 0-47.06-21.16-47.06-47.06 0-25.9 21.16-47.06 47.06-47.06h117.84c25.9 0 47.06 21.16 47.06 47.06 0 25.9-21.16 47.06-47.06 47.06H283.1z"/></svg>slack</a> </p> </li> </ul> </div> </div> </div> <!-- end MetaColumn 2 --> <!-- End Macro-Column 2 --> </div> </footer> </div> <script src="/static/base/1.0.1/js/member_acknowledgement.js"></script> </body> </html>