




Working Papers
Sorry, no publications matched your criteria.
Under Review
Xudong Han, Ning Guo, Ronghan Xu, Chaoyang Song, Fang Wan
Anchoring Morphological Representations Unlocks Latent Proprioception in Soft Robots Online Forthcoming
Forthcoming, (Submitted to IEEE Transactions on Robotics).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Han2025AnchoringMorphological,
  title     = {Anchoring Morphological Representations Unlocks Latent Proprioception in Soft Robots},
  author    = {Xudong Han and Ning Guo and Ronghan Xu and Chaoyang Song and Fang Wan},
  url       = {https://github.com/ancorasir/ProSoRo},
  year      = {2025},
  date      = {2025-03-14},
  abstract  = {This research addresses the need for robust proprioceptive methods that capture the continuous deformations of soft robots without relying on multiple sensors that hinder compliance. We propose a bio-inspired strategy called \textit{latent proprioception}, which anchors the robot's overall deformation state to a single internal reference frame tracked by a miniature onboard camera. Through a multi-modal neural network trained on simulated and real data, we unify motion, force, and shape measurements into a shared representation in \textit{latent codes}, inferring unseen states from readily measured signals. Our experimental results show that this approach accurately reconstructs full-body deformations and forces from minimal sensing data, enabling soft robots to adapt to complex object manipulation or safe human interaction tasks. The proposed framework exemplifies how biological principles can inform and enhance robotics by reducing sensor complexity and preserving mechanical flexibility. We anticipate that such hybrid system codesign will advance robotic capabilities, deepen our understanding of natural movement, and potentially translate back into healthcare and wearable technologies for living beings. This work paves the way for soft robots endowed with greater autonomy and resilience. All codes are available at GitHub: https://github.com/ancorasir/ProSoRo.},
  note      = {Submitted to IEEE Transactions on Robotics},
  keywords  = {Authorship - Corresponding, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}
Xi Xia, Xingxing Chen, Junli Shi, Zhibin Li, Bingfa Jiang, Kaixi Huang, Mengxue Guo, Zeyun Yang, Zelong Liao, Chaoyang Song, Chuanfei Guo
Microstructure-Enabled Tough Adhesion and Enhanced Sensing Online Forthcoming
Forthcoming, (Submitted to Matter).
Abstract | BibTeX | Tags: Authorship - Co-Author, Status - Under Review
@online{Xia2025MicrostructureEnabled,
  title     = {Microstructure-Enabled Tough Adhesion and Enhanced Sensing},
  author    = {Xi Xia and Xingxing Chen and Junli Shi and Zhibin Li and Bingfa Jiang and Kaixi Huang and Mengxue Guo and Zeyun Yang and Zelong Liao and Chaoyang Song and Chuanfei Guo},
  year      = {2025},
  date      = {2025-01-13},
  abstract  = {Skin-like soft sensors are a key technology for humanoid robots and wearables. Achieving both robust interfaces and promoted sensing performances in soft sensors may enable their applications in extreme mechanical conditions of high shear and large strain. However, introducing tough adhesion to the interfaces in a sensor often compromises its sensing properties. Here, we use micropillars of hyperbranched polyurethane with a diameter smaller than its length of flaw sensitivity serving as an adhesion layer for exceptional mechanical stability, and also as an adaptive spacer for enhanced sensing properties. We show a strong size effect of the structures to toughen the interface, with ultrahigh interfacial toughness up to 5095 J m$^{-2}$ at a pillar diameter of 50 $\mu$m, which is one order of magnitude higher than the state-of-the-arts results. As a spacer, the micropillars provide enhanced sensitivity, adaptive limit of detection, rapid response to the acoustic range by decreasing the stiffness via elastic buckling. The sensors are ideal for the manipulation of heavy objects in humanoid robots and other applications.},
  note      = {Submitted to Matter},
  keywords  = {Authorship - Co-Author, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}
Victor-Louis De Gusseme, Thomas Lips, Remko Proesmans, Julius Hietala, Giwan Lee, Jiyoung Choi, Jeongil Choi, Geon Kim, Phayuth Yonrith, Domen Tabernik, Andrej Gams, Peter Nimac, Matej Urbas, Jon Muhovic, Danijel Skocaj, Matija Mavsar, Hyojeong Yu, Minseo Kwon, Young J. Kim, Yang Cong, Ronghan Chen, Yu Ren, Supeng Diao, Jiawei Weng, Jiayue Liu, Haoran Sun, Linhan Yang, Zeqing Zhang, Ning Guo, Lei Yang, Fang Wan, Chaoyang Song, Jia Pan, Yixiang Jin, Yong A, Jun Shi, Dingzhe Li, Yong Yang, Kakeru Yamasaki, Takumi Kajiwara, Yuki Nakadera, Krati Saxena, Tomohiro Shibata, Chongkun Xia, Kai Mo, Yanzhao Yu, Qihao Lin, Binqiang Ma, Uihun Sagong, JungHyun Choi, JeongHyun Park, Dongwoo Lee, Yeongmin Kim, Myun Joong Hwang, Yusuke Kuribayashi, Naoki Hiratsuka, Daisuke Tanaka, Solvi Arnold, Kimitoshi Yamazaki, Carlos Mateo-Agullo, Andreas Verleysen, Francis wyffels
A Dataset and Benchmark for Robotic Cloth Unfolding Grasp Selection: The ICRA 2024 Cloth Competition Online Forthcoming
Forthcoming, (Submitted to The International Journal of Robotics Research).
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, Status - Under Review
@online{DeGusseme2024BenchmarkingGrasp,
  title     = {A Dataset and Benchmark for Robotic Cloth Unfolding Grasp Selection: The ICRA 2024 Cloth Competition},
  author    = {Victor-Louis De Gusseme and Thomas Lips and Remko Proesmans and Julius Hietala and Giwan Lee and Jiyoung Choi and Jeongil Choi and Geon Kim and Phayuth Yonrith and Domen Tabernik and Andrej Gams and Peter Nimac and Matej Urbas and Jon Muhovic and Danijel Skocaj and Matija Mavsar and Hyojeong Yu and Minseo Kwon and Young J. Kim and Yang Cong and Ronghan Chen and Yu Ren and Supeng Diao and Jiawei Weng and Jiayue Liu and Haoran Sun and Linhan Yang and Zeqing Zhang and Ning Guo and Lei Yang and Fang Wan and Chaoyang Song and Jia Pan and Yixiang Jin and Yong A and Jun Shi and Dingzhe Li and Yong Yang and Kakeru Yamasaki and Takumi Kajiwara and Yuki Nakadera and Krati Saxena and Tomohiro Shibata and Chongkun Xia and Kai Mo and Yanzhao Yu and Qihao Lin and Binqiang Ma and Uihun Sagong and JungHyun Choi and JeongHyun Park and Dongwoo Lee and Yeongmin Kim and Myun Joong Hwang and Yusuke Kuribayashi and Naoki Hiratsuka and Daisuke Tanaka and Solvi Arnold and Kimitoshi Yamazaki and Carlos Mateo-Agullo and Andreas Verleysen and Francis wyffels},
  url       = {https://airo.ugent.be/cloth_competition/},
  year      = {2025},
  date      = {2025-01-10},
  abstract  = {Robotic cloth manipulation suffers from a lack of standardized benchmarks and shared datasets for evaluating and comparing different approaches. To address this, we organized the ICRA 2024 Cloth Competition, a unique head-to-head evaluation focused on grasp pose selection for cloth unfolding. Eleven diverse teams competed with a shared dual-arm robot, utilizing our publicly released dataset of real-world robotic cloth unfolding attempts. We expanded this dataset with 176 live evaluation trials, which now encompasses 679 unfolding demonstrations across 34 garments. The competition established a key benchmark and reference for robotic cloth manipulation. Analysis revealed a significant discrepancy between competition performance and prior work, underscoring the importance of independent out-of-the-lab evaluation in robotic cloth manipulation. The resulting dataset, one of the most comprehensive collections of real-world robotic cloth manipulation data, is a valuable resource for developing and evaluating grasp selection methods, particularly for learning-based approaches. It can serve as a foundation for future benchmarks and drive further progress in data-driven robotic cloth manipulation.},
  note      = {Submitted to The International Journal of Robotics Research},
  keywords  = {Authorship - Co-Author, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}
Xudong Han, Haoran Sun, Ning Guo, Sheng Ge, Jia Pan, Fang Wan, Chaoyang Song
Transferrable Robot Skills Approaching Human-Level Versatility in Automated Task Board Manipulation Online Forthcoming
Forthcoming, (Submitted to IEEE Robotics and Automation Practice for the Special Collection "Autonomous Robotic Grasping and Manipulation in Real-World Applications.").
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Han2025TransferrableRobot,
  title     = {Transferrable Robot Skills Approaching Human-Level Versatility in Automated Task Board Manipulation},
  author    = {Xudong Han and Haoran Sun and Ning Guo and Sheng Ge and Jia Pan and Fang Wan and Chaoyang Song},
  url       = {https://msvc-dlrg.github.io/},
  year      = {2024},
  date      = {2024-12-15},
  abstract  = {Versatility in engineering means adaptability and multi-functionality. For robotic automation, it signifies the ability to handle diverse tasks, easily switch between different operations, and thrive in changing environments. The current gap lies in developing agreed-upon frameworks and metrics that are both quantitative and context-appropriate, capturing not just mechanical capabilities but also cognitive adaptability, integration complexity, and economic value.
In this paper, we present the Design and Learning Research Group's (DLRG) solution for the euROBIN Manipulation Skill Versatility Challenge (MSVC) at IROS 2024 in Abu Dhabi, UAE. The MSVC, held annually since 2021, is part of the euROBIN project that seeks to advance transferrable robot skills for the circular economy by autonomously performing tasks such as object localization, insertion, door operation, circuit probing, and cable management. We approached the standardized task board provided by event organizers that mimics industrial testing procedures by structurally decomposing the task into subtask skills. We created a custom dashboard with drag-and-drop code blocks to streamline development and adaptation, enabling rapid code refinement and task restructuring, complementing the default remote web platform that records the performance. Our system completed the task board in 28.2 sec in the lab (37.2 sec on-site), nearly tripling the efficiency over the averaged best time of 83.5 sec by previous teams and bringing performance closer to a human baseline of 16.3 sec. By implementing subtasks as reusable code blocks, we facilitated the transfer of these skills to a distinct scenario, successfully removing a battery from a smoke detector with minimal reconfiguration.
We also provide suggestions for future research and industrial practice on robotic versatility in manipulation automation through globalized competitions, interdisciplinary efforts, standardization initiatives, and iterative testing in the real world to ensure that it is measured in a meaningful, actionable way.},
  note      = {Submitted to IEEE Robotics and Automation Practice for the Special Collection ``Autonomous Robotic Grasping and Manipulation in Real-World Applications.''},
  keywords  = {Authorship - Corresponding, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}
In this paper, we present the Design and Learning Research Group's (DLRG) solution for the euROBIN Manipulation Skill Versatility Challenge (MSVC) at IROS 2024 in Abu Dhabi, UAE. The MSVC, held annually since 2021, is part of the euROBIN project that seeks to advance transferrable robot skills for the circular economy by autonomously performing tasks such as object localization, insertion, door operation, circuit probing, and cable management. We approached the standardized task board provided by event organizers that mimics industrial testing procedures by structurally decomposing the task into subtask skills. We created a custom dashboard with drag-and-drop code blocks to streamline development and adaptation, enabling rapid code refinement and task restructuring, complementing the default remote web platform that records the performance. Our system completed the task board in 28.2 sec in the lab (37.2 sec on-site), nearly tripling the efficiency over the averaged best time of 83.5 sec by previous teams and bringing performance closer to a human baseline of 16.3 sec. By implementing subtasks as reusable code blocks, we facilitated the transfer of these skills to a distinct scenario, successfully removing a battery from a smoke detector with minimal reconfiguration.
We also provide suggestions for future research and industrial practice on robotic versatility in manipulation automation through globalized competitions, interdisciplinary efforts, standardization initiatives, and iterative testing in the real world to ensure that it is measured in a meaningful, actionable way.
Yuping Gu, Bangchao Huang, Haoran Sun, Ronghan Xu, Jiayi Yin, Wei Zhang, Fang Wan, Jia Pan, Chaoyang Song
One-DoF Robotic Design of Overconstrained Limbs with Energy-Efficient, Self-Collision-Free Motion Online Forthcoming
Forthcoming, (Submitted to Fundamental Research).
Abstract | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Gu2024OCLimbDesign,
  title     = {One-DoF Robotic Design of Overconstrained Limbs with Energy-Efficient, Self-Collision-Free Motion},
  author    = {Yuping Gu and Bangchao Huang and Haoran Sun and Ronghan Xu and Jiayi Yin and Wei Zhang and Fang Wan and Jia Pan and Chaoyang Song},
  year      = {2024},
  date      = {2024-10-27},
  abstract  = {While it is common to build robotic limbs with multiple degrees of freedom (DoF) inspired by nature, single DoF design remains fundamental, providing benefits including, but not limited to, simplicity, robustness, cost-effectiveness, and efficiency. Mechanisms, especially those with multiple links and revolute joints connected in closed loops, play an enabling factor in introducing motion diversity for 1-DoF systems, which are usually constrained by self-collision during a full-cycle range of motion. This study presents a novel computational approach to designing 1-DoF overconstrained robotic limbs for desired spatial trajectory while achieving energy-efficient, self-collision-free motion in full-cycle rotations. Firstly, we present the geometric optimization problem of linkage-based robotic limbs in a generalized formulation for self-collision-free design. Next, we formulate the spatial trajectory generation problem with the overconstrained linkages by optimizing the similarity and dynamic-related metrics. We further optimize the geometric shape of the overconstrained linkage to ensure smooth and collision-free motion driven by a single actuator. We validated our proposed method through various experiments, including personalized automata and bio-inspired hexapod robots. The resulting hexapod robot with overconstrained robotic limbs showed outstanding energy efficiency in forward walking.},
  note      = {Submitted to Fundamental Research},
  keywords  = {Authorship - Corresponding, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}

Fang Wan, Zheng Wang, Wei Zhang, Chaoyang Song
SeeThruFinger: See and Grasp Anything via a Multi-Modal Soft Touch Online Forthcoming
Forthcoming, (Submitted to IEEE Transactions on Robotics).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Wan2024SeeThruFinger,
  title     = {SeeThruFinger: See and Grasp Anything via a Multi-Modal Soft Touch},
  author    = {Fang Wan and Zheng Wang and Wei Zhang and Chaoyang Song},
  doi       = {10.48550/arXiv.2312.09822},
  year      = {2024},
  date      = {2024-09-20},
  abstract  = {We present SeeThruFinger, a Vision-Based Tactile Sensing (VBTS) architecture using a markerless See-Thru-Network. It achieves simultaneous visual perception and tactile sensing while providing omni-directional, adaptive grasping for manipulation. Multi-modal perception of intrinsic and extrinsic interactions is critical in building intelligent robots that learn. Instead of adding various sensors for different modalities, a preferred solution is to integrate them into one elegant and coherent design, which is a challenging task. This study leverages the in-finger vision to inpaint occluded regions of the external environment, achieving coherent scene reconstruction for visual perception. By tracking real-time segmentation of the Soft Polyhedral Network’s large-scale deformation, we achieved real-time markerless tactile sensing of 6D forces and torques. We demonstrate the capable performances of the SeeThruFinger for reactive grasping without using external cameras or dedicated force and torque sensors on the fingertips. Using the inpainted scene and the deformation mask, we further demonstrate the multi-modal performance of the SeeThruFinger architecture to simultaneously achieve various capabilities, including but not limited to scene inpainting, object detection, depth sensing, scene segmentation, masked deformation tracking, 6D force-and-torque sensing, and contact event detection, all within a single input from the in-finger vision of the See-Thru-Network in a markerless way. All codes are available at https://github.com/ancorasir/SeeThruFinger.},
  note      = {Submitted to IEEE Transactions on Robotics},
  keywords  = {Authorship - Corresponding, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}
Chengxiao Dong, Yu Pan, Xuanyi Dai, Edmond Ho Nang, Chaoyang Song, Fang Wan
Enhancing Full-Arch Intraoral Measurement with Robotic Process Automation Online Forthcoming
Forthcoming, (Submitted to Journal of Bionic Engineering).
Abstract | BibTeX | Tags: Authorship - Co-Author, Status - Under Review
@online{Dong2024EnhancingFull,
  title     = {Enhancing Full-Arch Intraoral Measurement with Robotic Process Automation},
  author    = {Chengxiao Dong and Yu Pan and Xuanyi Dai and Edmond Ho Nang and Chaoyang Song and Fang Wan},
  year      = {2024},
  date      = {2024-09-12},
  abstract  = {Intraoral scanning has become integral to digital workflows in dental implantology, offering a more efficient and comfortable alternative to conventional impression techniques. For complete edentulism, accurate scanning is crucial to successful full-arch dental implant rehabilitation. However, the absence of well-defined anatomical landmarks can lead to cumulative errors during merging sequential scans, often surpassing acceptable thresholds. Current mitigation strategies rely on manual adjustments in computer-aided design (CAD) software, a time-intensive process that depends heavily on the operator's expertise. This study presents a novel \textit{segment-match-correct} robotic process automation (RPA) workflow to enhance full-arch intraoral scans' positioning accuracy and efficiency. By leveraging 3D registration algorithms, the proposed method improves implant positioning accuracy while significantly reducing manual labor. To assess the robustness of this workflow, we simulated four types of noise to evaluate their impact on scanning errors. Our findings demonstrate that the RPA workflow reduces dentist workload from 5-8 minutes per scan to less than 1 minute (about 57 seconds) while achieving a lower linear error of 45.16 $\pm$ 23.76 $\mu$m, outperforming traditional scanning methods. We could replicate linear and angular deviations observed in real-world scans by simulating cumulative errors. This workflow improves the accuracy and efficiency of complete-arch implant rehabilitation and provides a practical solution to reduce cumulative scanning errors. Additionally, the noise simulations offer valuable insights into the origins of these errors, further optimizing intraoral scanner performance.},
  note      = {Submitted to Journal of Bionic Engineering},
  keywords  = {Authorship - Co-Author, Status - Under Review},
  pubstate  = {forthcoming},
  tppubtype = {online}
}
Journal Articles
Sorry, no publications matched your criteria.
Conference Papers
Sorry, no publications matched your criteria.
Extended Abstracts
Sorry, no publications matched your criteria.
Doctoral Thesis
Sorry, no publications matched your criteria.