




Working Papers
Fang Wan, Chaoyang Song
The Bionic Design and Learning of Overconstrained Robotic Limbs Working Paper Forthcoming
Forthcoming.
BibTeX | Tags: Authorship - Corresponding, Special - Working Paper
@workingpaper{Wan2024BionicDL,
title = {The Bionic Design and Learning of Overconstrained Robotic Limbs},
author = {Fang Wan and Chaoyang Song},
keywords = {Authorship - Corresponding, Special - Working Paper},
pubstate = {forthcoming},
tppubtype = {workingpaper}
}
Under Review
Xudong Han, Ning Guo, Ronghan Xu, Chaoyang Song, Fang Wan
Anchoring Morphological Representations Unlocks Latent Proprioception in Soft Robots Online Forthcoming
Forthcoming, (Submitted to IEEE Transactions on Robotics).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Han2025AnchoringMorphological,
title = {Anchoring Morphological Representations Unlocks Latent Proprioception in Soft Robots},
author = {Xudong Han and Ning Guo and Ronghan Xu and Chaoyang Song and Fang Wan},
url = {https://github.com/ancorasir/ProSoRo},
year = {2025},
date = {2025-03-14},
abstract = {This research addresses the need for robust proprioceptive methods that capture the continuous deformations of soft robots without relying on multiple sensors that hinder compliance. We propose a bio-inspired strategy called latent proprioception, which anchors the robot's overall deformation state to a single internal reference frame tracked by a miniature onboard camera. Through a multi-modal neural network trained on simulated and real data, we unify motion, force, and shape measurements into a shared representation in latent codes, inferring unseen states from readily measured signals. Our experimental results show that this approach accurately reconstructs full-body deformations and forces from minimal sensing data, enabling soft robots to adapt to complex object manipulation or safe human interaction tasks. The proposed framework exemplifies how biological principles can inform and enhance robotics by reducing sensor complexity and preserving mechanical flexibility. We anticipate that such hybrid system codesign will advance robotic capabilities, deepen our understanding of natural movement, and potentially translate back into healthcare and wearable technologies for living beings. This work paves the way for soft robots endowed with greater autonomy and resilience. All codes are available at GitHub: https://github.com/ancorasir/ProSoRo.},
note = {Submitted to IEEE Transactions on Robotics},
keywords = {Authorship - Corresponding, Status - Under Review},
pubstate = {forthcoming},
tppubtype = {online}
}
Xudong Han, Haoran Sun, Ning Guo, Sheng Ge, Jia Pan, Fang Wan, Chaoyang Song
Transferrable Robot Skills Approaching Human-Level Versatility in Automated Task Board Manipulation Online Forthcoming
Forthcoming, (Submitted to IEEE Robotics and Automation Practice for the Special Collection "Autonomous Robotic Grasping and Manipulation in Real-World Applications.").
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Han2025TransferrableRobot,
title = {Transferrable Robot Skills Approaching Human-Level Versatility in Automated Task Board Manipulation},
author = {Xudong Han and Haoran Sun and Ning Guo and Sheng Ge and Jia Pan and Fang Wan and Chaoyang Song},
url = {https://msvc-dlrg.github.io/},
year = {2024},
date = {2024-12-15},
abstract = {Versatility in engineering means adaptability and multi-functionality. For robotic automation, it signifies the ability to handle diverse tasks, easily switch between different operations, and thrive in changing environments. The current gap lies in developing agreed-upon frameworks and metrics that are both quantitative and context-appropriate, capturing not just mechanical capabilities but also cognitive adaptability, integration complexity, and economic value.
In this paper, we present the Design and Learning Research Group's (DLRG) solution for the euROBIN Manipulation Skill Versatility Challenge (MSVC) at IROS 2024 in Abu Dhabi, UAE. The MSVC, held annually since 2021, is part of the euROBIN project that seeks to advance transferrable robot skills for the circular economy by autonomously performing tasks such as object localization, insertion, door operation, circuit probing, and cable management. We approached the standardized task board provided by event organizers that mimics industrial testing procedures by structurally decomposing the task into subtask skills. We created a custom dashboard with drag-and-drop code blocks to streamline development and adaptation, enabling rapid code refinement and task restructuring, complementing the default remote web platform that records the performance. Our system completed the task board in 28.2 sec in the lab (37.2 sec on-site), nearly tripling the efficiency over the averaged best time of 83.5 sec by previous teams and bringing performance closer to a human baseline of 16.3 sec. By implementing subtasks as reusable code blocks, we facilitated the transfer of these skills to a distinct scenario, successfully removing a battery from a smoke detector with minimal reconfiguration.
We also provide suggestions for future research and industrial practice on robotic versatility in manipulation automation through globalized competitions, interdisciplinary efforts, standardization initiatives, and iterative testing in the real world to ensure that it is measured in a meaningful, actionable way.},
note = {Submitted to IEEE Robotics and Automation Practice for the Special Collection "Autonomous Robotic Grasping and Manipulation in Real-World Applications."},
keywords = {Authorship - Corresponding, Status - Under Review},
pubstate = {forthcoming},
tppubtype = {online}
}
Yuping Gu, Bangchao Huang, Haoran Sun, Ronghan Xu, Jiayi Yin, Wei Zhang, Fang Wan, Jia Pan, Chaoyang Song
One-DoF Robotic Design of Overconstrained Limbs with Energy-Efficient, Self-Collision-Free Motion Online Forthcoming
Forthcoming, (Submitted to Fundamental Research).
Abstract | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Gu2024OCLimbDesign,
title = {One-DoF Robotic Design of Overconstrained Limbs with Energy-Efficient, Self-Collision-Free Motion},
author = {Yuping Gu and Bangchao Huang and Haoran Sun and Ronghan Xu and Jiayi Yin and Wei Zhang and Fang Wan and Jia Pan and Chaoyang Song},
year = {2024},
date = {2024-10-27},
abstract = {While it is common to build robotic limbs with multiple degrees of freedom (DoF) inspired by nature, single DoF design remains fundamental, providing benefits including, but not limited to, simplicity, robustness, cost-effectiveness, and efficiency. Mechanisms, especially those with multiple links and revolute joints connected in closed loops, play an enabling factor in introducing motion diversity for 1-DoF systems, which are usually constrained by self-collision during a full-cycle range of motion. This study presents a novel computational approach to designing 1-DoF overconstrained robotic limbs for desired spatial trajectory while achieving energy-efficient, self-collision-free motion in full-cycle rotations. Firstly, we present the geometric optimization problem of linkage-based robotic limbs in a generalized formulation for self-collision-free design. Next, we formulate the spatial trajectory generation problem with the overconstrained linkages by optimizing the similarity and dynamic-related metrics. We further optimize the geometric shape of the overconstrained linkage to ensure smooth and collision-free motion driven by a single actuator. We validated our proposed method through various experiments, including personalized automata and bio-inspired hexapod robots. The resulting hexapod robot with overconstrained robotic limbs showed outstanding energy efficiency in forward walking.},
note = {Submitted to Fundamental Research},
keywords = {Authorship - Corresponding, Status - Under Review},
pubstate = {forthcoming},
tppubtype = {online}
}

Fang Wan, Zheng Wang, Wei Zhang, Chaoyang Song
SeeThruFinger: See and Grasp Anything via a Multi-Modal Soft Touch Online Forthcoming
Forthcoming, (Submitted to IEEE Transactions on Robotics).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Status - Under Review
@online{Wan2024SeeThruFinger,
title = {SeeThruFinger: See and Grasp Anything via a Multi-Modal Soft Touch},
author = {Fang Wan and Zheng Wang and Wei Zhang and Chaoyang Song},
doi = {10.48550/arXiv.2312.09822},
year = {2024},
date = {2024-09-20},
abstract = {We present SeeThruFinger, a Vision-Based Tactile Sensing (VBTS) architecture using a markerless See-Thru-Network. It achieves simultaneous visual perception and tactile sensing while providing omni-directional, adaptive grasping for manipulation. Multi-modal perception of intrinsic and extrinsic interactions is critical in building intelligent robots that learn. Instead of adding various sensors for different modalities, a preferred solution is to integrate them into one elegant and coherent design, which is a challenging task. This study leverages the in-finger vision to inpaint occluded regions of the external environment, achieving coherent scene reconstruction for visual perception. By tracking real-time segmentation of the Soft Polyhedral Network’s large-scale deformation, we achieved real-time markerless tactile sensing of 6D forces and torques. We demonstrate the capable performances of the SeeThruFinger for reactive grasping without using external cameras or dedicated force and torque sensors on the fingertips. Using the inpainted scene and the deformation mask, we further demonstrate the multi-modal performance of the SeeThruFinger architecture to simultaneously achieve various capabilities, including but not limited to scene inpainting, object detection, depth sensing, scene segmentation, masked deformation tracking, 6D force-and-torque sensing, and contact event detection, all within a single input from the in-finger vision of the See-Thru-Network in a markerless way. All codes are available at https://github.com/ancorasir/SeeThruFinger.},
note = {Submitted to IEEE Transactions on Robotics},
keywords = {Authorship - Corresponding, Status - Under Review},
pubstate = {forthcoming},
tppubtype = {online}
}
Journal Articles
Yujian Dong, Tianyu Wu, Chaoyang Song
Optimizing Robotic Manipulation with Decision-RWKV: A Recurrent Sequence Modeling Approach for Lifelong Learning Journal Article
In: Journal of Computing and Information Science in Engineering, vol. 23, no. 3, pp. 031004, 2025.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - J. Comput. Inf. Sci. Eng. (JCISE)
@article{Dong2024OptimizingRobotic,
title = {Optimizing Robotic Manipulation with Decision-RWKV: A Recurrent Sequence Modeling Approach for Lifelong Learning},
author = {Yujian Dong and Tianyu Wu and Chaoyang Song},
url = {https://doi.org/10.48550/arXiv.2407.16306},
doi = {10.1115/1.4067524},
year = {2025},
date = {2025-01-27},
urldate = {2025-01-27},
journal = {Journal of Computing and Information Science in Engineering},
volume = {23},
number = {3},
pages = {031004},
abstract = {Models based on the Transformer architecture have seen widespread application across fields such as natural language processing (NLP), computer vision, and robotics, with large language models (LLMs) like ChatGPT revolutionizing machine understanding of human language and demonstrating impressive memory and reproduction capabilities. Traditional machine learning algorithms struggle with catastrophic forgetting, which is detrimental to the diverse and generalized abilities required for robotic deployment. This paper investigates the Receptance Weighted Key Value (RWKV) framework, known for its advanced capabilities in efficient and effective sequence modeling, integration with the decision transformer (DT), and experience replay architectures. It focuses on potential performance enhancements in sequence decision-making and lifelong robotic learning tasks. We introduce the Decision-RWKV (DRWKV) model and conduct extensive experiments using the D4RL database within the OpenAI Gym environment and on the D’Claw platform to assess the DRWKV model's performance in single-task tests and lifelong learning scenarios, showcasing its ability to handle multiple subtasks efficiently. The code for all algorithms, training, and image rendering in this study is open-sourced at https://github.com/ancorasir/DecisionRWKV. },
keywords = {Authorship - Corresponding, JCR Q2, Jour - J. Comput. Inf. Sci. Eng. (JCISE)},
pubstate = {published},
tppubtype = {article}
}
Xiaobo Liu, Xudong Han, Wei Hong, Fang Wan, Chaoyang Song
Proprioceptive Learning with Soft Polyhedral Networks Journal Article
In: The International Journal of Robotics Research, vol. 43, no. 12, pp. 1916-1935, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Int. J. Robot. Res. (IJRR)
@article{Liu20242024ProprioceptiveLearning,
title = {Proprioceptive Learning with Soft Polyhedral Networks},
author = {Xiaobo Liu and Xudong Han and Wei Hong and Fang Wan and Chaoyang Song},
doi = {10.1177/02783649241238765},
year = {2024},
date = {2024-10-07},
urldate = {2024-03-13},
journal = {The International Journal of Robotics Research},
volume = {43},
number = {12},
pages = {1916-1935},
abstract = {Proprioception is the “sixth sense” that detects limb postures with motor neurons. It requires a natural integration between the musculoskeletal systems and sensory receptors, which is challenging among modern robots that aim for lightweight, adaptive, and sensitive designs at low costs in mechanical design and algorithmic computation. Here, we present the Soft Polyhedral Network with an embedded vision for physical interactions, capable of adaptive kinesthesia and viscoelastic proprioception by learning kinetic features. This design enables passive adaptations to omni-directional interactions, visually captured by a miniature high-speed motion-tracking system embedded inside for proprioceptive learning. The results show that the soft network can infer real-time 6D forces and torques with accuracies of 0.25/0.24/0.35 N and 0.025/0.034/0.006 Nm in dynamic interactions. We also incorporate viscoelasticity in proprioception during static adaptation by adding a creep and relaxation modifier to refine the predicted results. The proposed soft network combines simplicity in design, omni-adaptation, and proprioceptive sensing with high accuracy, making it a versatile solution for robotics at a low material cost with more than one million use cycles for tasks such as sensitive and competitive grasping and touch-based geometry reconstruction. This study offers new insights into vision-based proprioception for soft robots in adaptive grasping, soft manipulation, and human-robot interaction.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - Int. J. Robot. Res. (IJRR)},
pubstate = {published},
tppubtype = {article}
}
Ning Guo, Xudong Han, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Fang Wan, Chaoyang Song
Reconstructing Soft Robotic Touch via In-Finger Vision Journal Article
In: Advanced Intelligent Systems, vol. 6, iss. October, no. 10, pp. 2400022, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Guo2024ReconstructingSoft,
title = {Reconstructing Soft Robotic Touch via In-Finger Vision},
author = {Ning Guo and Xudong Han and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202400022},
year = {2024},
date = {2024-10-01},
urldate = {2024-10-01},
journal = {Advanced Intelligent Systems},
volume = {6},
number = {10},
issue = {October},
pages = {2400022},
abstract = {Incorporating authentic tactile interactions into virtual environments presents a notable challenge for the emerging development of soft robotic metamaterials. In this study, a vision-based approach is introduced to learning proprioceptive interactions by simultaneously reconstructing the shape and touch of a soft robotic metamaterial (SRM) during physical engagements. The SRM design is optimized to the size of a finger with enhanced adaptability in 3D interactions while incorporating a see-through viewing field inside, which can be visually captured by a miniature camera underneath to provide a rich set of image features for touch digitization. Employing constrained geometric optimization, the proprioceptive process with aggregated multi-handles is modeled. This approach facilitates real-time, precise, and realistic estimations of the finger's mesh deformation within a virtual environment. Herein, a data-driven learning model is also proposed to estimate touch positions, achieving reliable results with impressive R² scores of 0.9681, 0.9415, and 0.9541 along the x, y, and z axes. Furthermore, the robust performance of the proposed methods in touch-based human–cybernetic interfaces and human–robot collaborative grasping is demonstrated. In this study, the door is opened to future applications in touch-based digital twin interactions through vision-based soft proprioception.},
keywords = {Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
Xudong Han, Ning Guo, Yu Jie, He Wang, Fang Wan, Chaoyang Song
On Flange-Based 3D Hand-Eye Calibration for Soft Robotic Tactile Welding Journal Article
In: Measurement, vol. 238, iss. October, pp. 115376, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Measurement (MEAS)
@article{Han2024OnFlange,
title = {On Flange-Based 3D Hand-Eye Calibration for Soft Robotic Tactile Welding},
author = {Xudong Han and Ning Guo and Yu Jie and He Wang and Fang Wan and Chaoyang Song},
doi = {10.1016/j.measurement.2024.115376},
year = {2024},
date = {2024-10-01},
urldate = {2024-10-01},
journal = {Measurement},
volume = {238},
issue = {October},
pages = {115376},
abstract = {This paper investigates the direct application of standardized designs on the robot for conducting robot hand–eye calibration by employing 3D scanners with collaborative robots. The well-established geometric features of the robot flange are exploited by directly capturing its point cloud data. In particular, an iterative method is proposed to facilitate point cloud processing towards a refined calibration outcome. Several extensive experiments are conducted over a range of collaborative robots, including Universal Robots UR5 & UR10 e-series, Franka Emika, and AUBO i5 using an industrial-grade 3D scanner Photoneo Phoxi S & M and a commercial-grade 3D scanner Microsoft Azure Kinect DK. Experimental results show that translational and rotational errors converge efficiently to less than 0.28 mm and 0.25 degrees, respectively, achieving a hand–eye calibration accuracy as high as the camera’s resolution, probing the hardware limit. A welding seam tracking system is presented, combining the flange-based calibration method with soft tactile sensing. The experiment results show that the system enables the robot to adjust its motion in real-time, ensuring consistent weld quality and paving the way for more efficient and adaptable manufacturing processes.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - Measurement (MEAS)},
pubstate = {published},
tppubtype = {article}
}
Ning Guo, Xudong Han, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Jiansheng Dai, Fang Wan, Chaoyang Song
Proprioceptive State Estimation for Amphibious Tactile Sensing Journal Article
In: IEEE Transactions on Robotics, vol. 40, iss. September, pp. 4684-4698, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)
@article{Guo2024ProprioceptiveState,
title = {Proprioceptive State Estimation for Amphibious Tactile Sensing},
author = {Ning Guo and Xudong Han and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Jiansheng Dai and Fang Wan and Chaoyang Song},
doi = {10.1109/TRO.2024.3463509},
year = {2024},
date = {2024-09-18},
urldate = {2024-09-18},
journal = {IEEE Transactions on Robotics},
volume = {40},
issue = {September},
pages = {4684-4698},
abstract = {This article presents a novel vision-based proprioception approach for a soft robotic finger that can estimate and reconstruct tactile interactions in terrestrial and aquatic environments. The key to this system lies in the finger's unique metamaterial structure, which facilitates omnidirectional passive adaptation during grasping, protecting delicate objects across diverse scenarios. A compact in-finger camera captures high-framerate images of the finger's deformation during contact, extracting crucial tactile data in real time. We present a volumetric discretized model of the soft finger and use the geometry constraints captured by the camera to find the optimal estimation of the deformed shape. The approach is benchmarked using a motion capture system with sparse markers and a haptic device with dense measurements. Both results show state-of-the-art accuracy, with a median error of 1.96 mm for overall body deformation, corresponding to 2.1 % of the finger's length. More importantly, the state estimation is robust in both on-land and underwater environments as we demonstrate its usage for underwater object shape sensing. This combination of passive adaptation and real-time tactile sensing paves the way for amphibious robotic grasping applications.},
key = {2024-J-TRO-ProprioceptiveState},
keywords = {Authorship - Corresponding, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)},
pubstate = {published},
tppubtype = {article}
}
Tianyu Wu, Yujian Dong, Xiaobo Liu, Xudong Han, Yang Xiao, Jinqi Wei, Fang Wan, Chaoyang Song
Vision-based Tactile Intelligence with Soft Robotic Metamaterial Journal Article
In: Materials & Design, vol. 238, iss. February, pp. 112629, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Mat. Des. (MADE)
@article{Wu2024VisionBasedSRM,
title = {Vision-based Tactile Intelligence with Soft Robotic Metamaterial},
author = {Tianyu Wu and Yujian Dong and Xiaobo Liu and Xudong Han and Yang Xiao and Jinqi Wei and Fang Wan and Chaoyang Song},
doi = {10.1016/j.matdes.2024.112629},
year = {2024},
date = {2024-02-01},
urldate = {2024-02-01},
booktitle = {IEEE International Conference on Advanced Robotics and Mechatronics (ICARM2024)},
journal = {Materials & Design},
volume = {238},
issue = {February},
pages = {112629},
abstract = {Robotic metamaterials represent an innovative approach to creating synthetic structures that combine desired material characteristics with embodied intelligence, blurring the boundaries between materials and machinery. Inspired by the functional qualities of biological skin, integrating tactile intelligence into these materials has gained significant interest for research and practical applications. This study introduces a Soft Robotic Metamaterial (SRM) design featuring omnidirectional adaptability and superior tactile sensing, combining vision-based motion tracking and machine learning. The study compares two sensory integration methods to a state-of-the-art motion tracking system and force/torque sensor baseline: an internal-vision design with high frame rates and an external-vision design offering cost-effectiveness. The results demonstrate the internal-vision SRM design achieving an impressive tactile accuracy of 98.96%, enabling soft and adaptive tactile interactions, especially beneficial for dexterous robotic grasping. The external-vision design offers similar performance at a reduced cost and can be adapted for portability, enhancing material science education and robotic learning. This research significantly advances tactile sensing using vision-based motion tracking in soft robotic metamaterials, and the open-source availability on GitHub fosters collaboration and further exploration of this innovative technology (https://github.com/bionicdl-sustech/SoftRoboticTongs).},
keywords = {Authorship - Corresponding, JCR Q1, Jour - Mat. Des. (MADE)},
pubstate = {published},
tppubtype = {article}
}
Ning Guo, Xudong Han, Xiaobo Liu, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Jiansheng Dai, Fang Wan, Chaoyang Song
Autoencoding a Soft Touch to Learn Grasping from On-land to Underwater Journal Article
In: Advanced Intelligent Systems, vol. 6, iss. January, no. 1, pp. 2300382, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Guo2024AutoencodingA,
title = {Autoencoding a Soft Touch to Learn Grasping from On-land to Underwater},
author = {Ning Guo and Xudong Han and Xiaobo Liu and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Jiansheng Dai and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202300382},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Advanced Intelligent Systems},
volume = {6},
number = {1},
issue = {January},
pages = {2300382},
abstract = {Robots play a critical role as the physical agent of human operators in exploring the ocean. However, it remains challenging to grasp objects reliably while fully submerging under a highly pressurized aquatic environment with little visible light, mainly due to the fluidic interference on the tactile mechanics between the finger and object surfaces. This study investigates the transferability of grasping knowledge from on-land to underwater via a vision-based soft robotic finger that learns 6D forces and torques (FT) using a supervised variational autoencoder (SVAE). A high-framerate camera captures the whole-body deformations while a soft robotic finger interacts with physical objects on-land and underwater. Results show that the trained SVAE model learns a series of latent representations of the soft mechanics transferable from land to water, presenting a superior adaptation to the changing environments against commercial FT sensors. Soft, delicate, and reactive grasping enabled by tactile intelligence enhances the gripper's underwater interaction with improved reliability and robustness at a much-reduced cost, paving the path for learning-based intelligent grasping to support fundamental scientific discoveries in environmental and ocean research.},
keywords = {Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
Xiaobo Liu, Xudong Han, Ning Guo, Fang Wan, Chaoyang Song
Bio-inspired Proprioceptive Touch of a Soft Finger with Inner-Finger Kinesthetic Perception Journal Article
In: Biomimetics, vol. 8, no. 6, pp. 501, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)
@article{Liu2023BioInspired,
title = {Bio-inspired Proprioceptive Touch of a Soft Finger with Inner-Finger Kinesthetic Perception},
author = {Xiaobo Liu and Xudong Han and Ning Guo and Fang Wan and Chaoyang Song},
doi = {10.3390/biomimetics8060501},
year = {2023},
date = {2023-10-21},
urldate = {2023-10-21},
journal = {Biomimetics},
volume = {8},
number = {6},
pages = {501},
abstract = {In-hand object pose estimation is challenging for humans and robots due to occlusion caused by the hand and object. This paper proposes a soft finger that integrates inner vision with kinesthetic sensing to estimate object pose inspired by human fingers. The soft finger has a flexible skeleton and skin that adapts to different objects, and the skeleton deformations during interaction provide contact information obtained by the image from the inner camera. The proposed framework is an end-to-end method that uses raw images from soft fingers to estimate in-hand object pose. It consists of an encoder for kinesthetic information processing and an object pose and category estimator. The framework was tested on seven objects, achieving an impressive pose error of 2.02 mm and 11.34 degrees and a classification accuracy of 99.05%.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)},
pubstate = {published},
tppubtype = {article}
}
Yuping Gu, Ziqian Wang, Shihao Feng, Haoran Sun, Haibo Lu, Jia Pan, Fang Wan, Chaoyang Song
Computational Design Towards Energy Efficient Optimization in Overconstrained Robotic Limbs Journal Article
In: Journal of Computational Design and Engineering, vol. 10, iss. October, no. 5, pp. 1941–1956, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Editor's Choice, JCR Q1, Jour - J. Comput. Des. Eng. (JCDE)
@article{Gu2023ComputationalDesign,
title = {Computational Design Towards Energy Efficient Optimization in Overconstrained Robotic Limbs},
author = {Yuping Gu and Ziqian Wang and Shihao Feng and Haoran Sun and Haibo Lu and Jia Pan and Fang Wan and Chaoyang Song},
doi = {10.1093/jcde/qwad083},
year = {2023},
date = {2023-08-22},
urldate = {2023-08-22},
journal = {Journal of Computational Design and Engineering},
volume = {10},
number = {5},
issue = {October},
pages = {1941–1956},
abstract = {Legged robots are constantly evolving, and energy efficiency is a major driving factor in their design. However, combining mechanism efficiency and trajectory planning can be challenging. This work proposes a computational optimization framework for optimizing leg design during basic walking while maximizing energy efficiency. We generalize the robotic limb design as a four-bar linkage-based design pool and optimize the leg using an evolutionary algorithm. The leg configuration and design parameters are optimized based on user-defined objective functions. Our framework was validated by comparing it to measured data on our prototype quadruped robot for forward trotting. The Bennett robotic leg was advantageous for omni-directional locomotion with enhanced energy efficiency.},
keywords = {Authorship - Corresponding, Award - Editor's Choice, JCR Q1, Jour - J. Comput. Des. Eng. (JCDE)},
pubstate = {published},
tppubtype = {article}
}
Jiayu Huo, Jingran Wang, Yuqin Guo, Wanghongjie Qiu, Mingdong Chen, Harry Asada, Fang Wan, Chaoyang Song
Reconfigurable Design and Modeling of an Underwater Superlimb for Diving Assistance Journal Article
In: Advanced Intelligent Systems, vol. 5, iss. November, no. 11, pp. 2300245, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Back Cover, Award - Editor's Choice, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Huo2023ReconfigurableDesign,
title = {Reconfigurable Design and Modeling of an Underwater Superlimb for Diving Assistance},
author = {Jiayu Huo and Jingran Wang and Yuqin Guo and Wanghongjie Qiu and Mingdong Chen and Harry Asada and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202300245},
year = {2023},
date = {2023-08-17},
urldate = {2023-08-17},
journal = {Advanced Intelligent Systems},
volume = {5},
number = {11},
issue = {November},
pages = {2300245},
abstract = {This study presents the design of an underwater superlimb as a wearable robot, providing divers with mobility assistance and freeing their hands for manipulating tools underwater. The wearable design features a thrust vectoring system with two 3D-printed, waterproofed modules. The module with adjustable connections and strapping holes is designed to enable reconfiguration for multiple purposes, including regular use as an underwater superlimb for divers, manually operated as a handheld glider for swimmers, combined with an amphibian, legged robot as a quadruped superlimb, and coupled as a dual-unit autonomous underwater vehicle for underwater navigation. The kinematics and dynamics of the prototype and all of its reconfigured modes are developed. A sliding-mode controller is also introduced to achieve stable simulation in PyBullet. Field tests further support the feasibility of the underwater superlimb when worn on a test diver in a swimming pool. As the first underwater superlimb presented in the literature, this study opens new doors for supernumerary robotic limbs in underwater scenarios with multifunctional reconfiguration.},
keywords = {Authorship - Corresponding, Award - Back Cover, Award - Editor's Choice, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
Haoran Sun, Linhan Yang, Yuping Gu, Jia Pan, Fang Wan, Chaoyang Song
Bridging Locomotion and Manipulation Using Reconfigurable Robotic Limbs via Reinforcement Learning Journal Article
In: Biomimetics, vol. 8, no. 4, pp. 364, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)
@article{Sun2023BridgingLocomotion,
title = {Bridging Locomotion and Manipulation Using Reconfigurable Robotic Limbs via Reinforcement Learning},
author = {Haoran Sun and Linhan Yang and Yuping Gu and Jia Pan and Fang Wan and Chaoyang Song},
doi = {10.3390/biomimetics8040364},
year = {2023},
date = {2023-08-14},
urldate = {2023-08-14},
journal = {Biomimetics},
volume = {8},
number = {4},
pages = {364},
abstract = {Locomotion and manipulation are two essential skills in robotics but are often divided or decoupled into two separate problems. It is widely accepted that the topological duality between multi-legged locomotion and multi-fingered manipulation shares an intrinsic model. However, a lack of research remains to identify the data-driven evidence for further research. This paper explores a unified formulation of the loco-manipulation problem using reinforcement learning (RL) by reconfiguring robotic limbs with an overconstrained design into multi-legged and multi-fingered robots. Such design reconfiguration allows for adopting a co-training architecture for reinforcement learning towards a unified loco-manipulation policy. As a result, we find data-driven evidence to support the transferability between locomotion and manipulation skills using a single RL policy with a multilayer perceptron or graph neural network. We also demonstrate the Sim2Real transfer of the learned loco-manipulation skills in a robotic prototype. This work expands the knowledge frontiers on loco-manipulation transferability with learning-based evidence applied in a novel platform with overconstrained robotic limbs.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)},
pubstate = {published},
tppubtype = {article}
}
Yuping Gu, Shihao Feng, Yuqin Guo, Fang Wan, Jiansheng Dai, Jia Pan, Chaoyang Song
Overconstrained Coaxial Design of Robotic Legs with Omni-directional Locomotion Journal Article
In: Mechanism and Machine Theory, vol. 176, iss. October, pp. 105018, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Mech. Mach. Theory (MMT)
@article{Gu2022OverconstrainedCoaxial,
title = {Overconstrained Coaxial Design of Robotic Legs with Omni-directional Locomotion},
author = {Yuping Gu and Shihao Feng and Yuqin Guo and Fang Wan and Jiansheng Dai and Jia Pan and Chaoyang Song},
doi = {10.1016/j.mechmachtheory.2022.105018},
year = {2022},
date = {2022-10-01},
urldate = {2022-10-01},
journal = {Mechanism and Machine Theory},
volume = {176},
issue = {October},
pages = {105018},
abstract = {While being extensively researched in literature, overconstrained linkages’ engineering potential is yet to be explored. This study investigates the design of overconstrained linkages as robotic legs with coaxial actuation starting with the simplest case, Bennett linkage, to establish the theoretical foundations and engineering advantages of a class of overconstrained robots. We proposed a parametric design of the spatial links and joints in alternative forms so that one can fabricate these overconstrained limbs via 3D printing and then attach the linkage coaxially to a pair of servo actuators as a reconfigurable leg module. We adopted multi-objective optimization to refine the design parameters by analyzing its manipulability metric and force transmission, enabling omni-directional ground locomotion projected from a three-dimensional surface workspace. The proposed prototype quadruped was capable of omni-directional locomotion and had a minimal turning radius (0.2 Body Length) using the fewest actuators. We further explored the kinematics and design potentials to generalize the proposed method for all overconstrained 5R and 6R linkages, paving the path for a future direction in overconstrained robotics.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - Mech. Mach. Theory (MMT)},
pubstate = {published},
tppubtype = {article}
}
Chaoyang Song, Jianxi Luo, Katja Hölttä-Otto, Warren Seering, Kevin Otto
Crowdfunding for Design Innovation: Prediction Model with Critical Factors Journal Article
In: IEEE Transactions on Engineering Management, vol. 69, iss. August, no. 4, pp. 1565-1576, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Authorship - First Author, JCR Q1, Jour - IEEE Trans. Eng. Manag. (TEM)
@article{Song2022CrowdfunndingFor,
title = {Crowdfunding for Design Innovation: Prediction Model with Critical Factors},
author = {Chaoyang Song and Jianxi Luo and Katja Hölttä-Otto and Warren Seering and Kevin Otto},
doi = {10.1109/tem.2020.3001764},
year = {2022},
date = {2022-08-01},
urldate = {2022-08-01},
journal = {IEEE Transactions on Engineering Management},
volume = {69},
number = {4},
issue = {August},
pages = {1565-1576},
abstract = {Online reward-based crowdfunding campaigns have emerged as an innovative approach for validating demands, discovering early adopters, and seeking learning and feedback in the design processes of innovative products. However, crowdfunding campaigns for innovative products are faced with a high degree of uncertainty and suffer meager rates of success to fulfill their values for design. To guide designers and innovators for crowdfunding campaigns, this article presents a data-driven methodology to build a prediction model with critical factors for crowdfunding success, based on public online crowdfunding campaign data. Specifically, the methodology filters 26 candidate factors in the real-win-worth framework and identifies the critical ones via stepwise regression to predict the amount of crowdfunding. We demonstrate the methods via deriving prediction models and identifying essential factors from three-dimensional printer and smartwatch campaign data on Kickstarter and Indiegogo. The critical factors can guide campaign developments, and the prediction model may evaluate crowdfunding potential of innovations in contexts, to increase the chance of crowdfunding success of innovative products.},
keywords = {Authorship - Corresponding, Authorship - First Author, JCR Q1, Jour - IEEE Trans. Eng. Manag. (TEM)},
pubstate = {published},
tppubtype = {article}
}
Haokun Wang, Xiaobo Liu, Nuofan Qiu, Ning Guo, Fang Wan, Chaoyang Song
DeepClaw 2.0: A Data Collection Platform for Learning Human Manipulation Journal Article
In: Frontiers in Robotics and AI, vol. 9, pp. 787291, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)
@article{Wang2022DeepClaw2.0,
title = {DeepClaw 2.0: A Data Collection Platform for Learning Human Manipulation},
author = {Haokun Wang and Xiaobo Liu and Nuofan Qiu and Ning Guo and Fang Wan and Chaoyang Song},
issuetitle = {Section Computational Intelligence in Robotics},
doi = {10.3389/frobt.2022.787291},
year = {2022},
date = {2022-03-15},
urldate = {2022-03-15},
journal = {Frontiers in Robotics and AI},
volume = {9},
pages = {787291},
abstract = {Besides direct interaction, human hands are also skilled at using tools to manipulate objects for typical life and work tasks. This paper proposes DeepClaw 2.0 as a low-cost, open-sourced data collection platform for learning human manipulation. We use an RGB-D camera to visually track the motion and deformation of a pair of soft finger networks on a modified kitchen tong operated by human teachers. These fingers can be easily integrated with robotic grippers to bridge the structural mismatch between humans and robots during learning. The deformation of soft finger networks, which reveals tactile information in contact-rich manipulation, is captured passively. We collected a comprehensive sample dataset involving five human demonstrators in ten manipulation tasks with five trials per task. As a low-cost, open-sourced platform, we also developed an intuitive interface that converts the raw sensor data into state-action data for imitation learning problems. For learning-by-demonstration problems, we further demonstrated our dataset’s potential by using real robotic hardware to collect joint actuation data or using a simulated environment when limited access to the hardware.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)},
pubstate = {published},
tppubtype = {article}
}
Haiyang Jiang, Xudong Han, Yonglin Jing, Ning Guo, Fang Wan, Chaoyang Song
Rigid-Soft Interactive Design of a Lobster-Inspired Finger Surface for Enhanced Grasping Underwater Journal Article
In: Frontiers in Robotics and AI, vol. 8, pp. 787187, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)
@article{Jiang2021RigidSoft,
title = {Rigid-Soft Interactive Design of a Lobster-Inspired Finger Surface for Enhanced Grasping Underwater},
author = {Haiyang Jiang and Xudong Han and Yonglin Jing and Ning Guo and Fang Wan and Chaoyang Song},
doi = {10.3389/frobt.2021.787187},
year = {2021},
date = {2021-12-22},
urldate = {2021-12-22},
issuetitle = {Section Soft Robotics},
journal = {Frontiers in Robotics and AI},
volume = {8},
pages = {787187},
abstract = {Bio-inspirations from soft-bodied animals provide a rich design source for soft robots, yet limited literature explored the potential enhancement from rigid-bodied ones. This paper draws inspiration from the tooth profiles of the rigid claws of the Boston Lobster, aiming at an enhanced soft finger surface for underwater grasping using an iterative design process. The lobsters distinguish themselves from other marine animals with a pair of claws capable of dexterous object manipulation both on land and underwater. We proposed a 3-stage design iteration process that involves raw imitation, design parametric exploration, and bionic parametric exploitation on the original tooth profiles on the claws of the Boston Lobster. Eventually, 7 finger surface designs were generated and fabricated with soft silicone. We validated each design stage through many vision-based robotic grasping attempts against selected objects from the Evolved Grasping Analysis Dataset (EGAD). Over 14,000 grasp attempts were accumulated on land (71.4%) and underwater (28.6%), where we selected the optimal design through an on-land experiment and further tested its capability underwater. As a result, we observed an 18.2% improvement in grasping success rate at most from a resultant bionic finger surface design, compared with those without the surface, and a 10.4% improvement at most compared with the validation design from the previous literature. Results from this paper are relevant and consistent with the bioresearch earlier in 1911, showing the value of bionics. The results indicate the capability and competence of the optimal bionic finger surface design in an amphibious environment, which can contribute to future research in enhanced underwater grasping using soft robots.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)},
pubstate = {published},
tppubtype = {article}
}
Baiyue Wang, Weijie Guo, Shihao Feng, Hongdong Yi, Fang Wan, Chaoyang Song
Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing Journal Article
In: IEEE Robotics and Automation Letters, vol. 6, iss. July, no. 3, pp. 5284-5291, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Wang2021VolumetricallyEnhanced,
title = {Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing},
author = {Baiyue Wang and Weijie Guo and Shihao Feng and Hongdong Yi and Fang Wan and Chaoyang Song},
doi = {10.1109/LRA.2021.3072859},
year = {2021},
date = {2021-07-01},
urldate = {2021-07-01},
journal = {IEEE Robotics and Automation Letters},
volume = {6},
number = {3},
issue = {July},
pages = {5284-5291},
abstract = {Soft robots often show a superior power-to-weight ratio using highly compliant, light-weight material, which leverages various bio-inspired body designs to generate desirable deformations for life-like motions. In this letter, given that most material used for soft robots is light-weight in general, we propose a volumetrically enhanced design strategy for soft robots, providing a novel design guideline to govern the form factor of soft robots. We present the design, modeling, and optimization of a volumetrically enhanced soft actuator (VESA) with linear and rotary motions, respectively, achieving superior force and torque output, linear and rotary displacement, and overall extension ratio per unit volume. We further explored VESA's proprioceptive sensing capability by validating the output force and torque through analytical modeling and experimental verification. Our results show that the volumetric metrics hold the potential to be used as a practical design guideline to optimize soft robots’ engineering performance.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Linhan Yang, Xudong Han, Weijie Guo, Fang Wan, Jia Pan, Chaoyang Song
Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping Journal Article
In: IEEE Robotics and Automation Letters, vol. 6, iss. April, no. 2, pp. 3817-3824, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2021LearningBased,
title = {Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping},
author = {Linhan Yang and Xudong Han and Weijie Guo and Fang Wan and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2021.3065186},
year = {2021},
date = {2021-04-01},
urldate = {2021-04-01},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
journal = {IEEE Robotics and Automation Letters},
volume = {6},
number = {2},
issue = {April},
pages = {3817-3824},
address = {Xi’an, China},
abstract = {This letter presents a novel design of a soft tactile finger with omni-directional adaptation using multi-channel optical fibers for rigid-soft interactive grasping. Machine learning methods are used to train a model for real-time prediction of force, torque, and contact using the tactile data collected. We further integrated such fingers in a reconfigurable gripper design with three fingers so that the finger arrangement can be actively adjusted in real-time based on the tactile data collected during grasping, achieving the process of rigid-soft interactive grasping. Detailed sensor calibration and experimental results are also included to further validate the proposed design for enhanced grasping robustness. Video: https://www.youtube.com/watch?v=ynCfSA4FQnY.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Fang Wan, Haokun Wang, Jiyuan Wu, Yujia Liu, Sheng Ge, Chaoyang Song
A Reconfigurable Design for Omni-adaptive Grasp Learning Journal Article
In: IEEE Robotics and Automation Letters, vol. 5, iss. July, no. 3, pp. 4210-4217, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Wan2020AReconfigurable,
title = {A Reconfigurable Design for Omni-adaptive Grasp Learning},
author = {Fang Wan and Haokun Wang and Jiyuan Wu and Yujia Liu and Sheng Ge and Chaoyang Song},
doi = {10.1109/lra.2020.2982059},
year = {2020},
date = {2020-07-01},
urldate = {2020-07-01},
journal = {IEEE Robotics and Automation Letters},
volume = {5},
number = {3},
issue = {July},
pages = {4210-4217},
abstract = {The engineering design of robotic grippers presents an ample design space for optimization towards robust grasping. In this letter, we investigate how learning methods can be used to support the design reconfiguration of robotic grippers for grasping using a novel soft structure with omni-directional adaptation. We propose a gripper system that is reconfigurable in terms of the number and arrangement of the proposed finger, which generates a large number of possible design configurations. Such design reconfigurations with omni-adaptive fingers enable us to systematically investigate the optimal arrangement of the fingers towards robust grasping. Furthermore, we adopt a learning-based method as the baseline to benchmark the effectiveness of each design configuration. As a result, we found that the 3-finger radial configuration is suitable for space-saving and cost-effectiveness, achieving an average 96% grasp success rate on seen and novel objects selected from the YCB dataset. The 4-finger radial arrangement can be applied to cases that require a higher payload with even distribution. We achieved dimension reduction using the radial gripper design with the removal of z-axis rotation during grasping. We also reported the different outcomes with or without friction enhancement of the soft finger network.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Fang Wan, Chaoyang Song
Flange-Based Hand-Eye Calibration Using a 3D Camera with High Resolution, Accuracy, and Frame Rate Journal Article
In: Frontiers in Robotics and AI, vol. 7, pp. 65, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)
@article{Wan2020FlangeBased,
title = {Flange-Based Hand-Eye Calibration Using a 3D Camera with High Resolution, Accuracy, and Frame Rate},
author = {Fang Wan and Chaoyang Song},
doi = {10.3389/frobt.2020.00065},
year = {2020},
date = {2020-05-29},
urldate = {2020-05-29},
journal = {Frontiers in Robotics and AI},
volume = {7},
pages = {65},
abstract = {Point cloud data provides three-dimensional (3D) measurement of the geometric details in the physical world, which relies heavily on the quality of the machine vision system. In this paper, we explore the potentials of a 3D scanner of high quality (15 million points per second), accuracy (up to 0.150 mm), and frame rate (up to 20 FPS) during static and dynamic measurements of the robot flange for direct hand-eye calibration and trajectory error tracking. With the availability of high-quality point cloud data, we can exploit the standardized geometric features on the robot flange for 3D measurement, which are directly accessible for hand-eye calibration problems. In the meanwhile, we tested the proposed flange-based calibration methods in a dynamic setting to capture point cloud data in a high frame rate. We found that our proposed method works robustly even in dynamic environments, enabling a versatile hand-eye calibration during motion. Furthermore, capturing high-quality point cloud data in real-time opens new doors for the use of 3D scanners, capable of detecting sensitive anomalies of refined details even in motion trajectories. Codes and sample data of this calibration method is provided at Github (https://github.com/ancorasir/flange_handeye_calibration).},
keywords = {Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)},
pubstate = {published},
tppubtype = {article}
}
Linhan Yang, Fang Wan, Haokun Wang, Xiaobo Liu, Yujia Liu, Jia Pan, Chaoyang Song
Rigid-Soft Interactive Learning for Robust Grasping Journal Article
In: IEEE Robotics and Automation Letters, vol. 5, iss. April, no. 2, pp. 1720-1727, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2020RigidSoft,
title = {Rigid-Soft Interactive Learning for Robust Grasping},
author = {Linhan Yang and Fang Wan and Haokun Wang and Xiaobo Liu and Yujia Liu and Jia Pan and Chaoyang Song},
doi = {10.1109/lra.2020.2969932},
year = {2020},
date = {2020-04-01},
urldate = {2020-04-01},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
journal = {IEEE Robotics and Automation Letters},
volume = {5},
number = {2},
issue = {April},
pages = {1720-1727},
address = {Paris, France},
abstract = {Robot learning is widely accepted by academia and industry with its potential to transform autonomous robot control through machine learning. Inspired by widely used soft fingers on grasping, we propose a method of rigid-soft interactive learning, aiming at reducing the time of data collection. In this letter, we classify the interaction categories into Rigid-Rigid, Rigid-Soft, and Soft-Rigid according to the interaction surface between grippers and target objects. We find experimental evidence that the interaction types between grippers and target objects play an essential role in the learning methods. We use soft, stuffed toys for training, instead of everyday objects, to reduce the integration complexity and computational burden. Although the stuffed toys are limited in reflecting the physics of finger-object interaction in real-life scenarios, we exploit such rigid-soft interaction by changing the gripper fingers to the soft ones when dealing with rigid, daily-life items such as the Yale-CMU-Berkeley (YCB) objects. With a small data collection of 5 K picking attempts in total, our results suggest that such Rigid-Soft and Soft-Rigid interactions are transferable. Moreover, the combination of such interactions shows better performance on the grasping test. We also explore the effect of the grasp type on the learning method by changing the gripper configurations. We achieve the best grasping performance at 97.5% for easy YCB objects and 81.3% for difficult YCB objects while using a precise grasp with a two-soft-finger gripper to collect training data and power grasp with a four-soft-finger gripper to test the grasp policy.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Fang Wan, Chaoyang Song
A Neural Network with Logical Reasoning based on Auxiliary Inputs Journal Article
In: Frontiers in Robotics and AI, vol. 5, pp. 86, 2018.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)
@article{Wan2018ANeural,
title = {A Neural Network with Logical Reasoning based on Auxiliary Inputs},
author = {Fang Wan and Chaoyang Song},
issuetitle = {Section Computational Intelligence in Robotics},
doi = {10.3389/frobt.2018.00086},
year = {2018},
date = {2018-07-30},
urldate = {2018-07-30},
journal = {Frontiers in Robotics and AI},
volume = {5},
pages = {86},
abstract = {This paper describes a neural network design using auxiliary inputs, namely the indicators, that act as the hints to explain the predicted outcome through logical reasoning, mimicking the human behavior of deductive reasoning. Besides the original network input and output, we add an auxiliary input that reflects the specific logic of the data to formulate a reasoning process for cross-validation. We found that one can design either meaningful indicators, or even meaningless ones, when using such auxiliary inputs, upon which one can use as the basis of reasoning to explain the predicted outputs. As a result, one can formulate different reasonings to explain the predicted results by designing different sets of auxiliary inputs without the loss of trustworthiness of the outcome. This is similar to human explanation process where one can explain the same observation from different perspectives with reasons. We demonstrate our network concept by using the MNIST data with different sets of auxiliary inputs, where a series of design guidelines are concluded. Later, we validated our results by using a set of images taken from a robotic grasping platform. We found that our network enhanced the last 1–2% of the prediction accuracy while eliminating questionable predictions with self-conflicting logics. Future application of our network with auxiliary inputs can be applied to robotic detection problems such as autonomous object grasping, where the logical reasoning can be introduced to optimize robotic learning.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - Front. Robot. AI. (FROBT)},
pubstate = {published},
tppubtype = {article}
}
Yaohui Chen, Fang Wan, Tong Wu, Chaoyang Song
Soft-Rigid Interaction Mechanism towards a Lobster-inspired Hybrid Actuator Journal Article
In: Journal of Micromechanics and Microengineering, vol. 28, iss. December, no. 1, pp. 014007, 2017.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - J. Micromech. Microeng. (JMM)
@article{Chen2017SoftRigid,
title = {Soft-Rigid Interaction Mechanism towards a Lobster-inspired Hybrid Actuator},
author = {Yaohui Chen and Fang Wan and Tong Wu and Chaoyang Song},
doi = {10.1088/1361-6439/aa9e25},
year = {2017},
date = {2017-12-15},
urldate = {2017-12-15},
issuetitle = {Special Issue on Soft Robotics and Smart System Technologies},
journal = {Journal of Micromechanics and Microengineering},
volume = {28},
number = {1},
issue = {December},
pages = {014007},
abstract = {Soft pneumatic actuators (SPAs) are intrinsically light-weight and compliant, and therefore ideal for direct interaction with humans and implementation in wearable robotic devices. However, they also pose new challenges in describing and sensing their continuous deformation. In this paper, we propose a hybrid actuator design, bio-inspired by the lobster, which can generate reconfigurable bending movements through an internal soft chamber interacting with external rigid shells. This design with joint and link structures enables us to exactly track its bending configurations, which previously posed a significant challenge to soft robots. Analytic models are developed to illustrate the soft-rigid interaction mechanism, with experimental validation. A robotic glove using the hybrid actuators to assist grasping is assembled to illustrate their potential for safe human-robot interaction. Considering all the design merits, our work presents a practical approach to designing next-generation robots capable of achieving both good accuracy and compliance.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - J. Micromech. Microeng. (JMM)},
pubstate = {published},
tppubtype = {article}
}
Conference Papers
Xudong Han, Ning Guo, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Chaoyang Song, Fang Wan
Proprioceptive State Estimation for Amphibious Tactile Sensing Conference
IEEE International Conference on Robotics and Automation (ICRA2025), Atlanta, USA, 2025, (Dual-track Submission with TRO: https://doi.org/10.1109/TRO.2024.3463509).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA, Special - Dual-Track
@conference{Han2025ProprioceptiveState,
title = {Proprioceptive State Estimation for Amphibious Tactile Sensing},
author = {Xudong Han and Ning Guo and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Chaoyang Song and Fang Wan},
url = {https://github.com/ancorasir/PropSE},
doi = {10.1109/TRO.2024.3463509},
year = {2025},
date = {2025-03-07},
urldate = {2025-03-07},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2025)},
address = {Atlanta, USA},
abstract = {This paper presents a novel vision-based proprioception approach for a soft robotic finger that can estimate and reconstruct tactile interactions in terrestrial and aquatic environments. The key to this system lies in the finger's unique metamaterial structure, which facilitates omni-directional passive adaptation during grasping, protecting delicate objects across diverse scenarios. A compact in-finger camera captures high-framerate images of the finger's deformation during contact, extracting crucial tactile data in real time. We present a volumetric discretized model of the soft finger and use the geometry constraints captured by the camera to find the optimal estimation of the deformed shape. The approach is benchmarked using a motion capture system with sparse markers and a haptic device with dense measurements. Both results show state-of-the-art accuracies, with a median error of 1.96 mm for overall body deformation, corresponding to 2.1% of the finger's length. More importantly, the state estimation is robust in both on-land and underwater environments, as we demonstrate its usage for underwater object shape sensing. This combination of passive adaptation and real-time tactile sensing paves the way for amphibious robotic grasping applications. All codes are shared on GitHub: https://github.com/ancorasir/PropSE.},
note = {Dual-track Submission with TRO: https://doi.org/10.1109/TRO.2024.3463509},
keywords = {Authorship - Corresponding, Conf - ICRA, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Haoran Sun, Shihao Feng, Bangchao Huang, Zishang Zhang, Ronghan Xu, Guojing Huang, Guangyi Huang, Jiayi Yin, Nuofan Qiu, Hua Chen, Wei Zhang, Jia Pan, Fang Wan, Chaoyang Song
Overconstrained Locomotion Conference
International Symposium on Robotics Research (ISRR2024), Long Beach, California, USA, 2024, (Accepted).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ISRR
@conference{Sun2024OCLocomotion,
title = {Overconstrained Locomotion},
author = {Haoran Sun and Shihao Feng and Bangchao Huang and Zishang Zhang and Ronghan Xu and Guojing Huang and Guangyi Huang and Jiayi Yin and Nuofan Qiu and Hua Chen and Wei Zhang and Jia Pan and Fang Wan and Chaoyang Song},
url = {https://isrr2024.su.domains/},
doi = {10.48550/arXiv.2310.09824},
year = {2024},
date = {2024-12-08},
urldate = {2024-12-08},
booktitle = {International Symposium on Robotics Research (ISRR2024)},
address = {Long Beach, California, USA},
abstract = {This paper studies the design, modeling, and control of a novel robotic limb that produces overconstrained locomotion by employing the Bennett linkage for motion generation, capable of parametric reconfiguration between a reptile- and mammal-inspired morphology within a single quadruped. In contrast to the prevailing focus on planar linkages, this research delves into adopting overconstrained linkages as the limb mechanism. The overconstrained linkages have solid theoretical foundations in advanced kinematics but are under-explored in robotic applications. This study showcases the morphological superiority of Overconstrained Robotic Limbs (ORLs) that can transform into planar or spherical limbs, exemplified using the simplest case of a Bennett linkage as an ORL. We apply Model Predictive Control (MPC) to simulate a range of overconstrained locomotion tasks, revealing its superiority in energy efficiency against planar limbs when considering foothold distances and speeds. From an evolutionary biology perspective, these findings highlight the mechanism distinctions in limb design between reptiles and mammals and represent the first documented instance of ORLs outperforming planar limb designs in dynamic locomotion.},
note = {Accepted},
keywords = {Authorship - Corresponding, Conf - ISRR},
pubstate = {published},
tppubtype = {conference}
}
Linhan Yang, Lei Yang, Haoran Sun, Zeqing Zhang, Haibin He, Fang Wan, Chaoyang Song, Jia Pan
One Fling to Goal: Environment-aware Dynamics for Goal-conditioned Fabric Flinging Conference
Workshop on the Algorithmic Foundations of Robotics (WAFR2024), Chicago, USA, 2024, (Accepted).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - WAFR
@conference{Yang2024OneFling,
title = {One Fling to Goal: Environment-aware Dynamics for Goal-conditioned Fabric Flinging},
author = {Linhan Yang and Lei Yang and Haoran Sun and Zeqing Zhang and Haibin He and Fang Wan and Chaoyang Song and Jia Pan},
url = {https://www.algorithmic-robotics.org/},
doi = {10.48550/arXiv.2406.14136},
year = {2024},
date = {2024-10-07},
urldate = {2024-10-07},
booktitle = {Workshop on the Algorithmic Foundations of Robotics (WAFR2024)},
address = {Chicago, USA},
abstract = {Dynamic fabric manipulation is commonly seen in manufacturing and domestic settings. While dynamically manipulating a fabric piece to reach a target state is highly efficient, the task presents considerable challenges due to the varying properties of different fabrics, the complex dynamics of interaction with the environment, and the need to meet the required goal conditions. To address these challenges, we present One Fling to Goal, an algorithm capable of handling fabric pieces with diverse shapes and physical properties across various scenarios. Our method learns a graph-based dynamics model equipped with environmental awareness. With this dynamics model, we devise a real-time controller to enable high-speed fabric manipulation in one attempt, requiring less than 3 seconds to finish the goal-conditioned task. We experimentally validate our method on a goal-conditioned manipulation task in five diverse scenarios. Our method significantly improves this goal-conditioned task, achieving an average error of 13.2 mm in complex scenarios. Our method can be seamlessly transferred to real-world robotic systems and generalized to unseen scenarios in a zero-shot manner.},
note = {Accepted},
keywords = {Authorship - Corresponding, Conf - WAFR},
pubstate = {published},
tppubtype = {conference}
}
Sen Li, Fang Wan, Chaoyang Song
Active Surface with Passive Omni-Directional Adaptation for In-Hand Manipulation Conference
IEEE/IFToMM International Conference on Reconfigurable Mechanisms and Robots (ReMAR2024), Chicago, USA, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ReMAR
@conference{Li2024ActiveSurface,
title = {Active Surface with Passive Omni-Directional Adaptation for In-Hand Manipulation},
author = {Sen Li and Fang Wan and Chaoyang Song},
url = {https://iftomm-world.org/conferences/remar2024/},
doi = {10.1109/ReMAR61031.2024.10619925},
year = {2024},
date = {2024-06-23},
urldate = {2024-06-23},
booktitle = {IEEE/IFToMM International Conference on Reconfigurable Mechanisms and Robots (ReMAR2024)},
address = {Chicago, USA},
abstract = {Soft fingers with omni-directional adaptability excel in 3D twisting, outperforming two-dimensional self-adaptive hands using a finger rotation mechanism to achieve similar adaptability. In this study, we present the design of a soft robotic finger with an active surface on an omni-adaptive structure, which can be easily installed on existing grippers and achieve stability and dexterity for in-hand manipulation. The system’s active surfaces initially transfer the object from the fingertip segment with less compliance to the middle segment of the finger with superior adaptability. Despite the omni-directional deformation of the finger, in-hand manipulation can still be executed with controlled active surfaces. We characterized the soft finger’s stiffness distribution and simplified models to assess the feasibility of lifting and reorienting a grasped object in a 3D twisting state. A set of experiments on in-hand manipulation was performed with the proposed fingers, demonstrating the dexterity and robustness of the strategy.},
keywords = {Authorship - Corresponding, Conf - ReMAR},
pubstate = {published},
tppubtype = {conference}
}
Yenan Chen, Chuye Zhang, Pengxi Gu, Jianuo Qiu, Jiayi Yin, Nuofan Qiu, Guojing Huang, Bangchao Huang, Zishang Zhang, Hui Deng, Wei Zhang, Fang Wan, Chaoyang Song
Evolutionary Morphology Towards Overconstrained Locomotion via Large-Scale, Multi-Terrain Deep Reinforcement Learning Conference
IEEE/IFToMM International Conference on Reconfigurable Mechanisms and Robots (ReMAR2024), Chicago, USA, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ReMAR
@conference{Chen2024EvolutionaryMorphology,
title = {Evolutionary Morphology Towards Overconstrained Locomotion via Large-Scale, Multi-Terrain Deep Reinforcement Learning},
author = {Yenan Chen and Chuye Zhang and Pengxi Gu and Jianuo Qiu and Jiayi Yin and Nuofan Qiu and Guojing Huang and Bangchao Huang and Zishang Zhang and Hui Deng and Wei Zhang and Fang Wan and Chaoyang Song},
url = {https://iftomm-world.org/conferences/remar2024/},
doi = {10.1109/ReMAR61031.2024.10618090},
year = {2024},
date = {2024-06-23},
urldate = {2024-06-23},
booktitle = {IEEE/IFToMM International Conference on Reconfigurable Mechanisms and Robots (ReMAR2024)},
address = {Chicago, USA},
abstract = {While the animals' Fin-to-Limb evolution has been well-researched in biology, such morphological transformation remains under-adopted in the modern design of advanced robotic limbs. This paper investigates a novel class of overconstrained locomotion from a design and learning perspective inspired by evolutionary morphology, aiming to integrate the concept of 'intelligent design under constraints' - hereafter referred to as constraint-driven design intelligence - in developing modern robotic limbs with superior energy efficiency. We propose a 3D-printable design of robotic limbs parametrically reconfigurable as a classical planar 4-bar linkage, an overconstrained Bennett linkage, and a spherical 4-bar linkage. These limbs adopt a co-axial actuation, identical to the modern legged robot platforms, with the added capability of upgrading into a wheel-legged system. Then, we implemented a large-scale, multi-terrain deep reinforcement learning framework to train these reconfigurable limbs for a comparative analysis of overconstrained locomotion in energy efficiency. Results show that the overconstrained limbs exhibit more efficient locomotion than planar limbs during forward and sideways walking over different terrains, including floors, slopes, and stairs, with or without random noises, by saving at least 22% mechanical energy in completing the traverse task, with the spherical limbs being the least efficient. It also achieves the highest average speed of 0.85 m/s on flat terrain, which is 20% faster than the planar limbs. This study paves the path for an exciting direction for future research in overconstrained robotics leveraging evolutionary morphology and reconfigurable mechanism intelligence when combined with state-of-the-art methods in deep reinforcement learning.},
keywords = {Authorship - Corresponding, Conf - ReMAR},
pubstate = {published},
tppubtype = {conference}
}
Tianyu Wu, Yujian Dong, Yang Xiao, Jinqi Wei, Fang Wan, Chaoyang Song
Vision-based, Low-cost, Soft Robotic Tongs for Shareable and Reproducible Tactile Learning Conference
IEEE International Conference on Advanced Robotics and Mechatronics (ICARM2024), Tokyo, Japan, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICARM
@conference{Wu2024VisionBasedb,
title = {Vision-based, Low-cost, Soft Robotic Tongs for Shareable and Reproducible Tactile Learning},
author = {Tianyu Wu and Yujian Dong and Yang Xiao and Jinqi Wei and Fang Wan and Chaoyang Song},
url = {https://github.com/bionicdl-sustech/SoftRoboticTongs},
doi = {10.1109/ICARM62033.2024.10715842},
year = {2024},
date = {2024-06-01},
urldate = {2024-06-01},
booktitle = {IEEE International Conference on Advanced Robotics and Mechatronics (ICARM2024)},
address = {Tokyo, Japan},
abstract = {Recent research shows a growing interest in adopting touch interaction for robot learning, yet it remains challenging to efficiently acquire high-quality, structured tactile data at a low cost. In this study, we propose the design of vision-based soft robotic tongs to generate reproducible and shareable data of tactile interaction for learning. We further developed a web-based platform for convenient data collection and a portable assembly that can be deployed within minutes. We trained a simple network to infer the 6D force and torque from the relative pose data of markers on the fingers, reaching a reasonably high accuracy (an MAE of 0.548 N at 60 Hz within [0, 20] N) at a cost of only 50 USD per set. The recorded tactile data is downloadable for robot learning. We further demonstrated the system for interacting with robotic arms in manipulation learning and remote control. We have open-sourced the system on GitHub with further information. (https://github.com/bionicdl-sustech/SoftRoboticTongs)},
keywords = {Authorship - Corresponding, Conf - ICARM},
pubstate = {published},
tppubtype = {conference}
}
Xudong Han, Sheng Liu, Fang Wan, Chaoyang Song
Vision-based Tactile Sensing for an Omni-adaptive Soft Finger Conference
IEEE International Conference on Development and Learning (ICDL2023), Macau SAR, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICDL
@conference{Han2023VisionBased,
title = {Vision-based Tactile Sensing for an Omni-adaptive Soft Finger},
author = {Xudong Han and Sheng Liu and Fang Wan and Chaoyang Song},
url = {https://www.proceedings.com/content/072/072332webtoc.pdf},
doi = {10.1109/ICDL55364.2023.10364455},
year = {2023},
date = {2023-11-09},
urldate = {2023-11-09},
booktitle = {IEEE International Conference on Development and Learning (ICDL2023)},
address = {Macau SAR},
abstract = {Vision-based tactile sensing provides a novel solution to robotic proprioception using visual information to infer physical interaction on the contact surface. In this paper, we leveraged the omni-adaptive capability of a soft finger with differential stiffness by adding a monocular camera at its bottom to track its spatial deformation while interacting with objects. We modeled this soft finger's physical interaction and measured the stiffness distribution through experiments. The camera captured the soft finger's deformation when interacting with probes for different contact forces and positions. Using a neural network modified from AlexNet, we proposed a preliminary estimation model of the contact force and position using the captured images. The results show that the proposed method can achieve an accuracy of 90% for position estimation and a normalized root mean squared error of 3.4% for force estimation, showing the reliability and robustness of the proposed sensing method.},
keywords = {Authorship - Corresponding, Conf - ICDL},
pubstate = {published},
tppubtype = {conference}
}
Xiaobo Liu, Fang Wan, Sheng Ge, Haokun Wang, Haoran Sun, Chaoyang Song
Jigsaw-based Benchmarking for Learning Robotic Manipulation Honorable Mention Conference
IEEE International Conference on Advanced Robotics and Mechatronics (ICARM2023), Sanya, China, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Best Conference Paper Finalist, Award - Paper, Conf - ICARM
@conference{Liu2023JigsawBased,
title = {Jigsaw-based Benchmarking for Learning Robotic Manipulation},
author = {Xiaobo Liu and Fang Wan and Sheng Ge and Haokun Wang and Haoran Sun and Chaoyang Song},
url = {http://www.ieee-arm.org/icarm2023/},
doi = {10.1109/ICARM58088.2023.10218784},
year = {2023},
date = {2023-07-08},
urldate = {2023-07-08},
booktitle = {IEEE International Conference on Advanced Robotics and Mechatronics (ICARM2023)},
address = {Sanya, China},
abstract = {Benchmarking provides experimental evidence of the scientific baseline to enhance the progression of fundamental research, which is also applicable to robotics. In this paper, we propose a method to benchmark metrics of robotic manipulation, which addresses the spatial-temporal reasoning skills for robot learning with the jigsaw game. In particular, our approach exploits a simple set of jigsaw pieces by designing a structured protocol, which is highly customizable according to a wide range of task specifications. Researchers can selectively adopt the proposed protocol to benchmark their research outputs at a comparable scale of functional, task, and system-level detail. The purpose is to provide a potential look-up table for learning-based robot manipulation, commonly available in other engineering disciplines, to facilitate the adoption of robotics through calculated, empirical, and systematic experimental evidence.},
keywords = {Authorship - Corresponding, Award - Best Conference Paper Finalist, Award - Paper, Conf - ICARM},
pubstate = {published},
tppubtype = {conference}
}
Yuqin Guo, Rongzheng Zhang, Wanghongjie Qiu, Harry Asada, Fang Wan, Chaoyang Song
Underwater Intention Recognition using Head Motion and Throat Vibration for Supernumerary Robotic Assistance Best Paper Conference
IEEE International Conference on Automation Science and Engineering (CASE2023), Auckland, New Zealand, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Best Healthcare Automation Paper, Award - Paper, Conf - CASE
@conference{Guo2023UnderwaterIntention,
title = {Underwater Intention Recognition using Head Motion and Throat Vibration for Supernumerary Robotic Assistance},
author = {Yuqin Guo and Rongzheng Zhang and Wanghongjie Qiu and Harry Asada and Fang Wan and Chaoyang Song},
url = {https://case2023.org/},
doi = {10.1109/CASE56687.2023.10260480},
year = {2023},
date = {2023-06-26},
urldate = {2023-06-26},
booktitle = {IEEE International Conference on Automation Science and Engineering (CASE2023)},
address = {Auckland, New Zealand},
abstract = {This study presents a multi-modal mechanism for recognizing human intentions while diving underwater, aiming to achieve natural human-robot interactions through an underwater superlimb for diving assistance. The underwater environment severely limits the divers' capabilities in intention expression, which becomes more challenging when they intend to operate tools while keeping control of body postures in 3D with the various diving suits and gears. The current literature is limited in underwater intention recognition, impeding the development of intelligent wearable systems for human-robot interactions underwater. Here, we present a novel solution to simultaneously detect head motion and throat vibrations under the water in a compact, wearable design. Experiment results show that using machine learning algorithms, we achieved high performance in integrating these two modalities to translate human intentions to robot control commands for an underwater superlimb system. This study's results paved the way for future development in underwater intention recognition and underwater human-robot interactions with supernumerary support.},
keywords = {Authorship - Corresponding, Award - Best Healthcare Automation Paper, Award - Paper, Conf - CASE},
pubstate = {published},
tppubtype = {conference}
}
Fang Wan, Xiaobo Liu, Ning Guo, Xudong Han, Feng Tian, Chaoyang Song
Visual Learning Towards Soft Robot Force Control using a 3D Metamaterial with Differential Stiffness Conference
Conference on Robot Learning (CoRL2021), London & Virtual, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - CoRL
@conference{Wan2022VisualLearning,
title = {Visual Learning Towards Soft Robot Force Control using a 3D Metamaterial with Differential Stiffness},
author = {Fang Wan and Xiaobo Liu and Ning Guo and Xudong Han and Feng Tian and Chaoyang Song},
url = {https://proceedings.mlr.press/v164/wan22a/wan22a.pdf},
year = {2021},
date = {2021-11-08},
urldate = {2021-11-08},
booktitle = {Conference on Robot Learning (CoRL2021)},
address = {London & Virtual},
abstract = {This paper explores the feasibility of learning robot force control and interaction using soft metamaterial and machine vision. We start by investigating the differential stiffness of a hollow, cone-shaped, 3D metamaterial made from soft rubber, achieving a large stiffness ratio between the axial and radial directions that leads to an omni-directionally adaptive form response during physical interaction. Then, using image data collected from its internal deformation during various interactions, we explored two similar designs but different learning strategies to estimate force control and interactions on the end-effector of a UR10 e-series robot arm. One is to directly learn the force and torque response from raw images of the metamaterial’s internal deformation. The other is to indirectly estimate the 6D force and torque using a neural network by visually tracking the 6D pose of a marker fixed inside the 3D metamaterial. Finally, we integrated the two proposed systems and achieved similar force feedback and control interactions in simple tasks such as circle following and text writing. Our results show that the learning method holds the potential to support the concept of soft robot force control, providing an intuitive interface at a low cost for robotic systems, with performance comparable to classical force and torque sensors.},
keywords = {Authorship - Corresponding, Conf - CoRL},
pubstate = {published},
tppubtype = {conference}
}
Shihao Feng, Yuping Gu, Weijie Guo, Yuqin Guo, Fang Wan, Jia Pan, Chaoyang Song
An Overconstrained Robotic Leg with Coaxial Quasi-direct Drives for Omni-directional Ground Mobility Conference
IEEE International Conference on Robotics and Automation (ICRA2021), Xi’an, China, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA
@conference{Feng2021AnOverconstrained,
title = {An Overconstrained Robotic Leg with Coaxial Quasi-direct Drives for Omni-directional Ground Mobility},
author = {Shihao Feng and Yuping Gu and Weijie Guo and Yuqin Guo and Fang Wan and Jia Pan and Chaoyang Song},
doi = {10.1109/ICRA48506.2021.9561829},
year = {2021},
date = {2021-05-30},
urldate = {2021-05-30},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2021)},
address = {Xi’an, China},
abstract = {Planar mechanisms dominate modern designs of legged robots with remote actuator placement for robust agility in ground mobility. This paper presents a novel design of robotic leg modules using the Bennett linkage, driven by two coaxially arranged quasi-direct actuators capable of omnidirectional ground locomotion. The Bennett linkage belongs to a family of overconstrained linkages with three-dimensional spatial motion and non-parallel joint axes. We present the first work regarding the design, modeling, and optimization of the Bennett leg module, enabling lateral, crab-like locomotion that is not achievable with robotic legs designed with common planar mechanisms. We further explored the concept of overconstrained robots, a class of advanced robots based on the design reconfiguration of the Bennett leg modules, serving as a potential direction for future research.},
keywords = {Authorship - Corresponding, Conf - ICRA},
pubstate = {published},
tppubtype = {conference}
}
Linhan Yang, Xudong Han, Weijie Guo, Fang Wan, Jia Pan, Chaoyang Song
Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping Conference
IEEE International Conference on Robotics and Automation (ICRA2021), Xi’an, China, 2021, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3065186).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA, Special - Dual-Track
@conference{Yang2021LearningBased-ICRA,
title = {Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping},
author = {Linhan Yang and Xudong Han and Weijie Guo and Fang Wan and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2021.3065186},
year = {2021},
date = {2021-05-30},
urldate = {2021-05-30},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2021)},
issue = {2},
address = {Xi’an, China},
abstract = {This letter presents a novel design of a soft tactile finger with omni-directional adaptation using multi-channel optical fibers for rigid-soft interactive grasping. Machine learning methods are used to train a model for real-time prediction of force, torque, and contact using the tactile data collected. We further integrated such fingers in a reconfigurable gripper design with three fingers so that the finger arrangement can be actively adjusted in real-time based on the tactile data collected during grasping, achieving the process of rigid-soft interactive grasping. Detailed sensor calibration and experimental results are also included to further validate the proposed design for enhanced grasping robustness.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3065186},
keywords = {Authorship - Corresponding, Conf - ICRA, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Weijie Guo, Baiyue Wang, Shihao Feng, Hongdong Yi, Fang Wan, Chaoyang Song
Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing Conference
IEEE International Conference on Soft Robotics (RoboSoft2021), New Haven, CT, USA, 2021, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3072859).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track
@conference{Guo2021VolumetricallyEnhanced,
title = {Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing},
author = {Weijie Guo and Baiyue Wang and Shihao Feng and Hongdong Yi and Fang Wan and Chaoyang Song},
doi = {10.1109/LRA.2021.3072859},
year = {2021},
date = {2021-04-12},
urldate = {2021-04-12},
booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2021)},
address = {New Haven, CT, USA},
abstract = {Soft robots often show a superior power-to-weight ratio using highly compliant, light-weight material, which leverages various bio-inspired body designs to generate desirable deformations for life-like motions. In this letter, given that most material used for soft robots is light-weight in general, we propose a volumetrically enhanced design strategy for soft robots, providing a novel design guideline to govern the form factor of soft robots. We present the design, modeling, and optimization of a volumetrically enhanced soft actuator (VESA) with linear and rotary motions, respectively, achieving superior force and torque output, linear and rotary displacement, and overall extension ratio per unit volume. We further explored VESA's proprioceptive sensing capability by validating the output force and torque through analytical modeling and experimental verification. Our results show that the volumetric metrics hold the potential to be used as a practical design guideline to optimize soft robots’ engineering performance.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3072859},
keywords = {Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Haiyang Jiang, Yonglin Jing, Ning Guo, Weijie Guo, Fang Wan, Chaoyang Song
Lobster-inspired Finger Surface Design for Grasping with Enhanced Robustness Conference
IEEE International Conference on Soft Robotics (RoboSoft2021), New Haven, CT, USA, 2021.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft
@conference{Jiang2021LobsterInspired,
title = {Lobster-inspired Finger Surface Design for Grasping with Enhanced Robustness},
author = {Haiyang Jiang and Yonglin Jing and Ning Guo and Weijie Guo and Fang Wan and Chaoyang Song},
doi = {10.1109/RoboSoft51838.2021.9479215},
year = {2021},
date = {2021-04-12},
urldate = {2021-04-12},
booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2021)},
address = {New Haven, CT, USA},
keywords = {Authorship - Corresponding, Conf - RoboSoft},
pubstate = {published},
tppubtype = {conference}
}
Fang Wan, Haokun Wang, Xiaobo Liu, Linhan Yang, Chaoyang Song
DeepClaw: A Robotic Hardware Benchmarking Platform for Learning Object Manipulation Conference
IEEE/ASME International Conference on Advanced Intelligent Mechatronics (AIM2020), Boston, MA, USA, 2020.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - AIM
@conference{Wan2020DeepClaw1.0,
title = {DeepClaw: A Robotic Hardware Benchmarking Platform for Learning Object Manipulation},
author = {Fang Wan and Haokun Wang and Xiaobo Liu and Linhan Yang and Chaoyang Song},
doi = {10.1109/aim43001.2020.9159011},
year = {2020},
date = {2020-07-06},
urldate = {2020-07-06},
booktitle = {IEEE/ASME International Conference on Advanced Intelligent Mechatronics (AIM2020)},
address = {Boston, MA, USA},
keywords = {Authorship - Corresponding, Conf - AIM},
pubstate = {published},
tppubtype = {conference}
}
Linhan Yang, Fang Wan, Haokun Wang, Xiaobo Liu, Yujia Liu, Jia Pan, Chaoyang Song
Rigid-Soft Interactive Learning for Robust Grasping Conference
IEEE International Conference on Robotics and Automation (ICRA2020), Paris, France, 2020, (Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2969932).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA, Special - Dual-Track
@conference{Yang2020RigidSoft-ICRA,
title = {Rigid-Soft Interactive Learning for Robust Grasping},
author = {Linhan Yang and Fang Wan and Haokun Wang and Xiaobo Liu and Yujia Liu and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2020.2969932},
year = {2020},
date = {2020-05-31},
urldate = {2020-05-31},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2020)},
address = {Paris, France},
abstract = {Robot learning is widely accepted by academia and industry for its potential to transform autonomous robot control through machine learning. Inspired by the wide use of soft fingers in grasping, we propose a method of rigid-soft interactive learning aimed at reducing the time needed for data collection. In this letter, we classify the interactions into Rigid-Rigid, Rigid-Soft, and Soft-Rigid categories according to the interaction surfaces between grippers and target objects. We find experimental evidence that the interaction type between grippers and target objects plays an essential role in the learning method. We use soft, stuffed toys for training, instead of everyday objects, to reduce the integration complexity and computational burden. Although the stuffed toys are limited in reflecting the physics of finger-object interaction in real-life scenarios, we exploit such rigid-soft interaction by switching to soft gripper fingers when dealing with rigid, daily-life items such as the Yale-CMU-Berkeley (YCB) objects. With a small data collection of 5K picking attempts in total, our results suggest that such Rigid-Soft and Soft-Rigid interactions are transferable. Moreover, the combination of such interactions shows better performance in the grasping test. We also explore the effect of the grasp type on the learning method by changing the gripper configurations. We achieve the best grasping performance at 97.5% for easy YCB objects and 81.3% for difficult YCB objects when using a precision grasp with a two-soft-finger gripper to collect training data and a power grasp with a four-soft-finger gripper to test the grasp policy.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2969932},
keywords = {Authorship - Corresponding, Conf - ICRA, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Fang Wan, Haokun Wang, Jiyuan Wu, Yujia Liu, Sheng Ge, Chaoyang Song
A Reconfigurable Design for Omni-Adaptive Grasp Learning Conference
IEEE International Conference on Soft Robotics (RoboSoft2020), New Haven, CT, USA, 2020, (Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2982059).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track
@conference{Wan2020ReconfigurableDesign,
title = {A Reconfigurable Design for Omni-Adaptive Grasp Learning},
author = {Fang Wan and Haokun Wang and Jiyuan Wu and Yujia Liu and Sheng Ge and Chaoyang Song},
doi = {10.1109/LRA.2020.2982059},
year = {2020},
date = {2020-05-15},
urldate = {2020-05-15},
booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2020)},
address = {New Haven, CT, USA},
abstract = {The engineering design of robotic grippers presents an ample design space for optimization towards robust grasping. In this letter, we investigate how a learning method can be used to support the design reconfiguration of robotic grippers for grasping using a novel soft structure with omni-directional adaptation. We propose a gripper system that is reconfigurable in terms of the number and arrangement of the proposed fingers, which generates a large number of possible design configurations. Such design reconfigurations with omni-adaptive fingers enable us to systematically investigate the optimal arrangement of the fingers towards robust grasping. Furthermore, we adopt a learning-based method as the baseline to benchmark the effectiveness of each design configuration. As a result, we found that the 3-finger radial configuration is suitable for space-saving and cost-effectiveness, achieving an average 96% grasp success rate on seen and novel objects selected from the YCB dataset. The 4-finger radial arrangement can be applied to cases that require a higher payload with even distribution. We achieved dimension reduction using the radial gripper design with the removal of z-axis rotation during grasping. We also reported the different outcomes with or without friction enhancement of the soft finger network.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2982059},
keywords = {Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Xia Wu, Haiyuan Liu, Ziqi Liu, Mingdong Chen, Fang Wan, Chenglong Fu, Harry Asada, Zheng Wang, Chaoyang Song
Robotic Cane as a Soft SuperLimb for Elderly Sit-to-Stand Assistance Conference
IEEE International Conference on Soft Robotics (RoboSoft2020), New Haven, CT, USA, 2020.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft
@conference{Wu2020RoboticCane,
title = {Robotic Cane as a Soft SuperLimb for Elderly Sit-to-Stand Assistance},
author = {Xia Wu and Haiyuan Liu and Ziqi Liu and Mingdong Chen and Fang Wan and Chenglong Fu and Harry Asada and Zheng Wang and Chaoyang Song},
doi = {10.1109/robosoft48309.2020.9116028},
year = {2020},
date = {2020-05-15},
urldate = {2020-05-15},
booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2020)},
address = {New Haven, CT, USA},
keywords = {Authorship - Corresponding, Conf - RoboSoft},
pubstate = {published},
tppubtype = {conference}
}
Zeyi Yang, Sheng Ge, Fang Wan, Yujia Liu, Chaoyang Song
Scalable Tactile Sensing for an Omni-adaptive Soft Robot Finger Conference
IEEE International Conference on Soft Robotics (RoboSoft2020), New Haven, CT, USA, 2020.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft
@conference{Yang2020ScalableTactile,
title = {Scalable Tactile Sensing for an Omni-adaptive Soft Robot Finger},
author = {Zeyi Yang and Sheng Ge and Fang Wan and Yujia Liu and Chaoyang Song},
doi = {10.1109/robosoft48309.2020.9116026},
year = {2020},
date = {2020-05-15},
urldate = {2020-05-15},
booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2020)},
address = {New Haven, CT, USA},
keywords = {Authorship - Corresponding, Conf - RoboSoft},
pubstate = {published},
tppubtype = {conference}
}
Fang Wan, Zheng Wang, Brooke Franchuk, Xinyao Hu, Zhenglong Sun, Chaoyang Song
Hybrid Actuator Design for a Gait Augmentation Wearable Conference
IEEE International Conference on Robotics and Biomimetics (ROBIO2017), Macau, 2017.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - ROBIO
@conference{Wan2017HybridActuator,
title = {Hybrid Actuator Design for a Gait Augmentation Wearable},
author = {Fang Wan and Zheng Wang and Brooke Franchuk and Xinyao Hu and Zhenglong Sun and Chaoyang Song},
doi = {10.1109/robio.2017.8324761},
year = {2017},
date = {2017-12-05},
urldate = {2017-12-05},
booktitle = {IEEE International Conference on Robotics and Biomimetics (ROBIO2017)},
address = {Macau},
keywords = {Authorship - Corresponding, Conf - ROBIO},
pubstate = {published},
tppubtype = {conference}
}
Yaohui Chen, Sing Le, Qiao Chu Tan, Oscar Lau, Chaoyang Song
A Lobster-Inspired Hybrid Actuator with Rigid and Soft Components Conference
ASME International Design Engineering Technical Conferences and Computers and Information in Engineering Conference (DETC/CIE2017), Cleveland, Ohio, USA, 2017.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - DETC/CIE
@conference{Chen2017ALobsterDETC,
title = {A Lobster-Inspired Hybrid Actuator with Rigid and Soft Components},
author = {Yaohui Chen and Sing Le and Qiao Chu Tan and Oscar Lau and Chaoyang Song},
doi = {10.1115/detc2017-68082},
year = {2017},
date = {2017-08-16},
urldate = {2017-08-16},
booktitle = {ASME International Design Engineering Technical Conferences and Computers and Information in Engineering Conference (DETC/CIE2017)},
address = {Cleveland, Ohio, USA},
keywords = {Authorship - Corresponding, Conf - DETC/CIE},
pubstate = {published},
tppubtype = {conference}
}
Yaohui Chen, Sing Le, Qiao Chu Tan, Oscar Lau, Fang Wan, Chaoyang Song
A Lobster-inspired Robotic Glove for Hand Rehabilitation Conference
IEEE International Conference on Robotics and Automation (ICRA2017), Marina Bay Sands, Singapore, 2017.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA
@conference{Chen2017ALobsterICRA,
title = {A Lobster-inspired Robotic Glove for Hand Rehabilitation},
author = {Yaohui Chen and Sing Le and Qiao Chu Tan and Oscar Lau and Fang Wan and Chaoyang Song},
doi = {10.1109/icra.2017.7989556},
year = {2017},
date = {2017-05-29},
urldate = {2017-05-29},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2017)},
address = {Marina Bay Sands, Singapore},
keywords = {Authorship - Corresponding, Conf - ICRA},
pubstate = {published},
tppubtype = {conference}
}
Yaohui Chen, Sing Le, Qiao Chu Tan, Oscar Lau, Fang Wan, Chaoyang Song
A Reconfigurable Hybrid Actuator with Rigid and Soft Components Conference
IEEE International Conference on Robotics and Automation (ICRA2017), Marina Bay Sands, Singapore, 2017.
Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA
@conference{Chen2017AReconfigurable,
title = {A Reconfigurable Hybrid Actuator with Rigid and Soft Components},
author = {Yaohui Chen and Sing Le and Qiao Chu Tan and Oscar Lau and Fang Wan and Chaoyang Song},
doi = {10.1109/icra.2017.7988691},
year = {2017},
date = {2017-05-29},
urldate = {2017-05-29},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2017)},
address = {Marina Bay Sands, Singapore},
keywords = {Authorship - Corresponding, Conf - ICRA},
pubstate = {published},
tppubtype = {conference}
}
Chaoyang Song, Jianxi Luo, Katja Hölttä-Otto, Kevin Otto
Product Innovation Differences between New Ventures and Incumbent Firms Conference
Annual Meeting of the Academy of Management (AoM2014), Philadelphia, PA, USA, 2014.
Links | BibTeX | Tags: Authorship - Corresponding, Authorship - First Author, Conf - AoM
@conference{Song2014ProductInnovation,
title = {Product Innovation Differences between New Ventures and Incumbent Firms},
author = {Chaoyang Song and Jianxi Luo and Katja Hölttä-Otto and Kevin Otto},
doi = {10.5465/ambpp.2014.13204abstract},
year = {2014},
date = {2014-08-01},
urldate = {2014-08-01},
booktitle = {Annual Meeting of the Academy of Management (AoM2014)},
address = {Philadelphia, PA, USA},
keywords = {Authorship - Corresponding, Authorship - First Author, Conf - AoM},
pubstate = {published},
tppubtype = {conference}
}
Extended Abstracts
Haoran Sun, Linhan Yang, Zeqing Zhang, Ning Guo, Lei Yang, Fang Wan, Chaoyang Song, Jia Pan
CopGNN: Learning End-to-End Cloth Coverage Prediction via Graph Neural Networks Workshop
2024, (Extended Abstract accepted to IROS 2024 Workshop on Benchmarking via Competitions in Robotic Grasping and Manipulation).
@workshop{Sun2024CopGNN,
title = {CopGNN: Learning End-to-End Cloth Coverage Prediction via Graph Neural Networks},
author = {Haoran Sun and Linhan Yang and Zeqing Zhang and Ning Guo and Lei Yang and Fang Wan and Chaoyang Song and Jia Pan},
url = {https://sites.google.com/view/iros2024-workshop-bench-in-rgm/},
year = {2024},
date = {2024-10-13},
urldate = {2024-10-13},
abstract = {Cloth manipulation in robotics, such as folding or unfolding fabrics, remains challenging due to the complex, nonlinear dynamics of deformable materials, which can adopt an infinite number of configurations. As Team Greater Bay, we participated in the ICRA 2024 Cloth Competition and scored an Average Coverage of 0.53 (the 1st place team scored 0.60). This extended abstract presents our Coverage Prediction Graph Neural Network (CopGNN) approach implemented for this competition. Instead of directly estimating the cloth's configuration, our method implicitly infers the unknown state using a Graph Neural Network (GNN) and predicts the resultant coverage area from multiple grasping points using a second GNN, without relying on an explicit dynamics model. The contributions of this work include: (1) a comprehensive simulation pipeline to generate a large-scale dataset tailored to the cloth manipulation task; (2) an end-to-end approach to predict the coverage area using only the hanging cloth's depth image; and (3) a heuristic-based sampling strategy to enhance the robustness of zero-shot sim-to-real transfer.},
note = {Extended Abstract accepted to IROS 2024 Workshop on Benchmarking via Competitions in Robotic Grasping and Manipulation},
keywords = {},
pubstate = {published},
tppubtype = {workshop}
}
Tianyu Wu, Sheng Ge, Yujian Dong, Ronghan Xu, Fang Wan, Chaoyang Song
From DeepClaw to MagiClaw: Towards Universal Action Embodiment Workshop
2024, (Extended Abstract accepted to IROS 2024 Workshop on Environment Dynamics Matters: Embodied Navigation to Movable Objects).
@workshop{Wu2024MagiClaw,
title = {From DeepClaw to MagiClaw: Towards Universal Action Embodiment},
author = {Tianyu Wu and Sheng Ge and Yujian Dong and Ronghan Xu and Fang Wan and Chaoyang Song},
url = {https://edmws.github.io/},
year = {2024},
date = {2024-10-13},
urldate = {2024-10-13},
note = {Extended Abstract accepted to IROS 2024 Workshop on Environment Dynamics Matters: Embodied Navigation to Movable Objects},
keywords = {},
pubstate = {published},
tppubtype = {workshop}
}
Chaoyang Song
The Design and Learning of Overconstrained Mechanisms towards Overconstrained Robotics Workshop
Mechanism and Machine Theory Symposium, Guimarães, Portugal, 2024, (Extended Abstract accepted to Mechanism and Machine Theory Symposium).
@workshop{Song2024TheDesign,
title = {The Design and Learning of Overconstrained Mechanisms towards Overconstrained Robotics},
author = {Chaoyang Song},
url = {https://mmtsymposium.com/
https://iftomm-world.org/conferences/mmt-symposium/},
year = {2024},
date = {2024-06-26},
urldate = {2024-06-26},
booktitle = {Mechanism and Machine Theory Symposium},
address = {Guimarães, Portugal},
abstract = {Overconstrained mechanisms play a pivotal role in mechanism theory, combining mathematical science with engineering design to provide the foundational kinematics for emerging applications in modern machinery and robotic systems. Calculating a mechanism’s mobility is among an engineer’s first steps towards building machines as desired, which can be a challenging task. The paradox of overconstrained mechanisms is, quoting Prof. Andreas Müller, that “although one may not construct a ‘perfectly overconstrained’ mechanism, one will, and this is the design goal, end up with an ‘almost overconstrained’ mechanism ... Therefore, and due to the flexibility of links and joint clearances, the real mechanism will exhibit almost the type of motion of its perfect (overconstrained) prototype. This is why the understanding of overconstrained mechanisms is important though.” The Mechanism and Machine Theory is the leading platform attracting researchers contributing to this research topic. However, a long-standing challenge remains to push the overconstrained mechanisms from theoretical kinematics to engineering applications with advanced robotics, where an emerging field of “overconstrained robotics” may interest researchers in related fields of expertise.},
note = {Extended Abstract accepted to Mechanism and Machine Theory Symposium},
keywords = {},
pubstate = {published},
tppubtype = {workshop}
}
Doctoral Thesis
Chaoyang Song
Kinematic Study of Overconstrained Linkages and Design of Reconfigurable Mechanisms PhD Thesis
Nanyang Technological University, 2013.
@phdthesis{Song2013KinematicStudy,
title = {Kinematic Study of Overconstrained Linkages and Design of Reconfigurable Mechanisms},
author = {Chaoyang Song},
url = {https://hdl.handle.net/10356/55261},
year = {2013},
date = {2013-02-14},
urldate = {2013-02-14},
address = {Singapore},
school = {Nanyang Technological University},
abstract = {This dissertation explores the possibilities to design reconfigurable mechanisms using the kinematic and geometric properties of existing overconstrained linkages with revolute joints. Despite the large number of overconstrained linkages reported in the literature, a comprehensive study of the relationships among them is lacking, which limits the understanding of overconstrained linkages and their potential applications. The first part of this dissertation has been devoted to the systematic generalization of a series of double-Goldberg linkage families, in which the relationship between a number of existing linkages and their variational cases has been revealed. The common link-pair and common Bennett-linkage methods have been proposed to connect a Goldberg 5R linkage and a subtractive Goldberg 5R linkage to form six types of overconstrained linkage closures. Three sub-families, Wohlhart’s double-Goldberg linkages, mixed double-Goldberg linkages and double-subtractive-Goldberg linkages, have been generalized to represent the original cases, variational cases and subtractive cases of the double-Goldberg linkage family. A substantial source of design for reconfigurable mechanisms in the Bennett-based linkage family has been presented in this part. In the second part, the kinematic study has been focused on the general line-symmetric Bricard linkage. The closure equations of the original and revised general line-symmetric Bricard linkages have been derived in explicit forms. For the general line-symmetric Bricard linkage, two independent and distinct linkage closures have been discovered. It has also been revealed that the revised cases are equivalent to the original cases with different setups on joint-axis directions. The potential of designing reconfigurable mechanisms through kinematic singularity has been demonstrated with the bifurcation behavior of the special line-symmetric Bricard linkage with zero offsets. The conceptual designs of reconfigurable mechanisms based on overconstrained linkages have been explored in the final part. Both analytical and constructive methods have been presented to design morphing structures using overconstrained linkages. Based on the double-Goldberg linkage and the general line-symmetric Bricard linkage, reconfigurable mechanisms have been designed with multiple operation forms between 6R and 4R linkages. Furthermore, a generic method of link-pair replacement has been developed for reconfiguration purposes, which has been applied to reconfigure the topology of different Bennett linkage networks in order to obtain different overconstrained mechanisms. Results in this dissertation could lead to substantial advancement in the design of reconfigurable mechanisms with kinematic singularities. In future work, the methods could be applied to design advanced reconfigurable robotic platforms with fewer actuators but more structural support.},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}