




Working Papers
Sorry, no publications matched your criteria.
Under Review
Sorry, no publications matched your criteria.
Journal Articles
Shuqiao Zhong, Wanghongjie Qiu, Xudong Han, Chaoyang Song, Zhiyuan Zhou, Fang Wan, Jian Lin
Bio-inspired Rigid-Soft Interaction for Robust Picking Underwater Journal Article
In: Ocean Engineering, vol. 346, pp. 123774, 2026.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - Ocean Eng.
@article{Zhong2025Bioinspired,
  title     = {Bio-inspired Rigid-Soft Interaction for Robust Picking Underwater},
  author    = {Shuqiao Zhong and Wanghongjie Qiu and Xudong Han and Chaoyang Song and Zhiyuan Zhou and Fang Wan and Jian Lin},
  journal   = {Ocean Engineering},
  volume    = {346},
  pages     = {123774},
  doi       = {10.1016/j.oceaneng.2025.123774},
  year      = {2026},
  date      = {2026-02-15},
  urldate   = {2026-02-15},
  abstract  = {We present a Lobster-inspired Soft Touch Enhanced Rigid Grasp (LobSTER-Grasp) gripper for robust underwater manipulation, replacing conventional grippers on commercial underwater robotic arms by integrating passively adaptive, networked soft fingers with rigid fingers and a newly designed mounting structure. We demonstrate that the networked soft fingers exhibit significantly lower hydrodynamic drag than solid fingers, mitigating the disturbance often induced when approaching underwater targets. In addition, we systematically investigate the parameters of lobster-inspired serrations on the finger surfaces, identifying a design that achieves 261% higher frictional force compared with standard uniform serrations. We embed these serrations into rigid and soft fingers for enhanced grasp reliability. Through a user study, we show that the LobSTER-Grasp gripper improves 38.2% grasping efficiency and 77.9% task versatility compared to a commercial counterpart, and we further validate its effectiveness in a tank environment using a remotely operated vehicle. Our findings suggest that this bio-inspired, rigid–soft coupling design offers a simple yet superior solution for more robust and adaptive underwater operations with strong potential in marine applications such as resource exploitation, ecological monitoring, and deep-sea exploration.},
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - Ocean Eng.},
  pubstate  = {published},
  tppubtype = {article}
}
Yuping Gu, Bangchao Huang, Haoran Sun, Ronghan Xu, Jiayi Yin, Wei Zhang, Fang Wan, Jia Pan, Chaoyang Song
One-DoF Robotic Design of Overconstrained Limbs with Energy-Efficient, Self-Collision-Free Motion Journal Article
In: Fundamental Research, vol. 0, iss. 0, no. 0, pp. 0, 2025, (Accepted).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Fund. Res. (FMRE)
@article{Gu2025OCLimbDesign,
  title     = {{One-DoF} Robotic Design of Overconstrained Limbs with Energy-Efficient, Self-Collision-Free Motion},
  author    = {Yuping Gu and Bangchao Huang and Haoran Sun and Ronghan Xu and Jiayi Yin and Wei Zhang and Fang Wan and Jia Pan and Chaoyang Song},
  url       = {https://doi.org/10.48550/arXiv.2509.22002},
  doi       = {10.1016/j.fmre.2025.09.023},
  journal   = {Fundamental Research},
  year      = {2025},
  date      = {2025-09-26},
  urldate   = {2025-09-26},
  abstract  = {While it is expected to build robotic limbs with multiple degrees of freedom (DoF) inspired by nature, a single DoF design remains fundamental, providing benefits that include, but are not limited to, simplicity, robustness, cost-effectiveness, and efficiency. Mechanisms, especially those with multiple links and revolute joints connected in closed loops, play an enabling factor in introducing motion diversity for 1-DoF systems, which are usually constrained by self-collision during a full-cycle range of motion. This study presents a novel computational approach to designing one-degree-of-freedom (1-DoF) overconstrained robotic limbs for a desired spatial trajectory, while achieving energy-efficient, self-collision-free motion in full-cycle rotations. Firstly, we present the geometric optimization problem of linkage-based robotic limbs in a generalized formulation for self-collision-free design. Next, we formulate the spatial trajectory generation problem with the overconstrained linkages by optimizing the similarity and dynamic-related metrics. We further optimize the geometric shape of the overconstrained linkage to ensure smooth and collision-free motion driven by a single actuator. We validated our proposed method through various experiments, including personalized automata and bio-inspired hexapod robots. The resulting hexapod robot, featuring overconstrained robotic limbs, demonstrated outstanding energy efficiency during forward walking.},
  note      = {Accepted},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Fund. Res. (FMRE)},
  pubstate  = {published},
  tppubtype = {article}
}
Chengxiao Dong, Yu Pan, Xuanyi Dai, Edmond Ho Nang Pow, Chaoyang Song, Fang Wan
Enhancing Full-Arch Intraoral Measurement with Robotic Process Automation Journal Article
In: Journal of Bionic Engineering, vol. 0, iss. October, no. 0, pp. 0, 2025, (Accepted).
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - J. Bionic Eng. (JBE)
@article{Dong2024EnhancingFull,
  title     = {Enhancing Full-Arch Intraoral Measurement with Robotic Process Automation},
  author    = {Chengxiao Dong and Yu Pan and Xuanyi Dai and Edmond Ho Nang Pow and Chaoyang Song and Fang Wan},
  doi       = {10.1007/s42235-025-00784-8},
  journal   = {Journal of Bionic Engineering},
  issue     = {October},
  year      = {2025},
  date      = {2025-08-26},
  urldate   = {2025-08-26},
  abstract  = {Intraoral scanning has become integral to digital workflows in dental implantology, offering a more efficient and comfortable alternative to conventional impression techniques. For complete edentulism, accurate scanning is crucial to successful full-arch dental implant rehabilitation. However, the absence of well-defined anatomical landmarks can lead to cumulative errors during merging sequential scans, often surpassing acceptable thresholds. Current mitigation strategies rely on manual adjustments in computer-aided design (CAD) software, a time-intensive process that depends heavily on the operator's expertise. This study presents a novel \textit{segment-match-correct} robotic process automation (RPA) workflow to enhance full-arch intraoral scans' positioning accuracy and efficiency. By leveraging 3D registration algorithms, the proposed method improves implant positioning accuracy while significantly reducing manual labor. To assess the robustness of this workflow, we simulated four types of noise to evaluate their impact on scanning errors. Our findings demonstrate that the RPA workflow reduces dentist workload from 5-8 minutes per scan to less than 1 minute (about 57 seconds) while achieving a lower linear error of 45.16 $\pm$ 23.76 \unit{micrometer}, outperforming traditional scanning methods. We could replicate linear and angular deviations observed in real-world scans by simulating cumulative errors. This workflow improves the accuracy and efficiency of complete-arch implant rehabilitation and provides a practical solution to reduce cumulative scanning errors. Additionally, the noise simulations offer valuable insights into the origins of these errors, further optimizing intraoral scanner performance.},
  note      = {Accepted},
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - J. Bionic Eng. (JBE)},
  pubstate  = {published},
  tppubtype = {article}
}
Chengxiao Dong, Xuanyi Dai, Yu Pan, Wanghongjie Qiu, Sen Li, Tianyu Wu, Yijie Jin, He Wang, Chaoyang Song, Fang Wan
Teaching Oral Care via Vision-based Deformable Perception Journal Article
In: Soft Science, vol. 5, no. 3, pp. 36, 2025.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Soft Sci. (SS)
@article{Dong2025TeachingOral,
  title     = {Teaching Oral Care via Vision-based Deformable Perception},
  author    = {Chengxiao Dong and Xuanyi Dai and Yu Pan and Wanghongjie Qiu and Sen Li and Tianyu Wu and Yijie Jin and He Wang and Chaoyang Song and Fang Wan},
  journal   = {Soft Science},
  volume    = {5},
  number    = {3},
  pages     = {36},
  url       = {https://github.com/ancorasir/VBDeformP4OralCare},
  doi       = {10.20517/ss.2025.14},
  year      = {2025},
  date      = {2025-07-30},
  urldate   = {2025-07-30},
  abstract  = {This paper presents a novel, cost-effective sensor platform based on Vision-based Deformable Perception (VBDeformP) for community oral health education. The system integrates a 3D-printed thermoplastic polyurethane (TPU) soft structure with a rigid resin frame and an ArUco marker to encode six-dimensional force and torque information. By transforming force estimation into a marker-based pose tracking problem, the VBDeformP sensor achieves accurate and robust force/torque inference under quasi-static and dynamic conditions using machine learning models. An adaptive image binarization algorithm extends reliable marker detection across a wide illumination range (10–5,000 lux), ensuring consistent performance in realistic dental scenarios. Experimental validation involving 10 healthy participants performing standardized brushing tasks demonstrated that the sensor attains measurement accuracies comparable to a commercial ATI Axia80-M20 sensor, with mean absolute errors of 0.55 N (2.19% relative error) and 0.067 Nm (2.68% relative error) for quasi-static forces and torques, and 0.16 N (4.10% relative error) and 0.023 Nm (5.75% relative error) under dynamic conditions. Moreover, the system's real-time brushing region classification algorithm achieved an overall accuracy of 98.12%, further highlighting its potential to deliver immediate, personalized oral hygiene guidance. Its low cost, rapid initialization, portability, and scalable fabrication render it a promising solution for enhancing oral health education in community settings. },
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Soft Sci. (SS)},
  pubstate  = {published},
  tppubtype = {article}
}
Xudong Han, Ning Guo, Ronghan Xu, Fang Wan, Chaoyang Song
Anchoring Morphological Representations Unlocks Latent Proprioception in Soft Robots Journal Article
In: Advanced Intelligent Systems, vol. 0, no. 0, pp. 0, 2025, (Accepted).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Han2025AnchoringMorphological,
  title     = {Anchoring Morphological Representations Unlocks Latent Proprioception in Soft Robots},
  author    = {Xudong Han and Ning Guo and Ronghan Xu and Fang Wan and Chaoyang Song},
  url       = {https://github.com/ancorasir/ProSoRo},
  doi       = {10.1002/aisy.202500444},
  journal   = {Advanced Intelligent Systems},
  year      = {2025},
  date      = {2025-07-01},
  urldate   = {2025-06-05},
  abstract  = {This research addresses the need for robust proprioceptive methods that capture the continuous deformations of soft robots without relying on multiple sensors that hinder compliance. We propose a bio-inspired strategy called \textit{latent proprioception}, which anchors the robot's overall deformation state to a single internal reference frame tracked by a miniature onboard camera. Through a multi-modal neural network trained on simulated and real data, we unify motion, force, and shape measurements into a shared representation in \textit{latent codes}, inferring unseen states from readily measured signals. Our experimental results show that this approach accurately reconstructs full-body deformations and forces from minimal sensing data, enabling soft robots to adapt to complex object manipulation or safe human interaction tasks. The proposed framework exemplifies how biological principles can inform and enhance robotics by reducing sensor complexity and preserving mechanical flexibility. We anticipate that such hybrid system codesign will advance robotic capabilities, deepen our understanding of natural movement, and potentially translate back into healthcare and wearable technologies for living beings. This work paves the way for soft robots endowed with greater autonomy and resilience. All codes are available at GitHub: https://github.com/ancorasir/ProSoRo.},
  note      = {Accepted},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
  pubstate  = {published},
  tppubtype = {article}
}
Xi Xia, Xingxing Chen, Junli Shi, Zhibin Li, Bingfa Jiang, Kaixi Huang, Mengxue Guo, Zeyun Yang, Zelong Liao, Chaoyang Song, Chuanfei Guo
Micropillar-Enabled Tough Adhesion and Enhanced Sensing Journal Article
In: Matter, vol. 8, iss. October, no. 10, pp. 102221, 2025.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - Matter
@article{Xia2025MicropillarEnabled,
  title     = {Micropillar-Enabled Tough Adhesion and Enhanced Sensing},
  author    = {Xi Xia and Xingxing Chen and Junli Shi and Zhibin Li and Bingfa Jiang and Kaixi Huang and Mengxue Guo and Zeyun Yang and Zelong Liao and Chaoyang Song and Chuanfei Guo},
  journal   = {Matter},
  volume    = {8},
  number    = {10},
  issue     = {October},
  pages     = {102221},
  doi       = {10.1016/j.matt.2025.102221},
  year      = {2025},
  date      = {2025-06-20},
  urldate   = {2025-06-20},
  abstract  = {Skin-like soft sensors are a key technology for humanoid robots and wearables. Achieving both robust interfaces and promoted sensing performances in soft sensors may enable their applications in extreme mechanical conditions of high shear and large strain. However, introducing tough adhesion to the interfaces in a sensor often compromises its sensing properties. Here, we use micropillars of hyperbranched polyurethane with a diameter smaller than its length of flaw sensitivity serving as an adhesion layer for exceptional mechanical stability, and also as an adaptive spacer for enhanced sensing properties. We show a strong size effect of the structures to toughen the interface, with ultrahigh interfacial toughness up to 5095 J m-2 at a pillar diameter of 50 μm, which is one order of magnitude higher than the state-of-the-arts results. As a spacer, the micropillars provide enhanced sensitivity, adaptive limit of detection, rapid response to the acoustic range by decreasing the stiffness via elastic buckling. The sensors are ideal for the manipulation of heavy objects in humanoid robots and other applications. },
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - Matter},
  pubstate  = {published},
  tppubtype = {article}
}
}
Sen Li, Chengxiao Dong, Chaoyang Song, Fang Wan
ActiveSPN: Active Soft Polyhedral Networks with Pose Estimation for In-Finger Object Manipulation Journal Article
In: IEEE Robotics and Automation Letters, vol. 10, no. 8, pp. 8115 - 8122, 2025, (Accepted).
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L)
@article{Li2025ActiveSPN,
  title     = {ActiveSPN: Active Soft Polyhedral Networks with Pose Estimation for In-Finger Object Manipulation},
  author    = {Sen Li and Chengxiao Dong and Chaoyang Song and Fang Wan},
  url       = {https://github.com/ancorasir/ActiveSPN},
  doi       = {10.1109/LRA.2025.3583616},
  journal   = {IEEE Robotics and Automation Letters},
  volume    = {10},
  number    = {8},
  pages     = {8115--8122},
  year      = {2025},
  date      = {2025-06-16},
  urldate   = {2025-06-16},
  abstract  = {Robotic grippers aim to replicate the remarkable functionalities of the human hand by providing advanced perception, adaptability, stability, and dexterity for complex tasks. Achieving these capabilities demands a sophisticated design hierarchy and robust perception mechanisms that ensure accurate manipulation. This paper introduces Active Soft Polyhedral Networks (ActiveSPN), a gripper design that leverages an active, non-biomimetic surface for precise in-hand manipulation. A vision system integrated directly into the fingers further facilitates accurate pose estimation of the in-finger object. The proposed system includes: (i) a soft polyhedral network featuring a transparent active belt to deliver complete three-dimensional adaptation and dexterous in-finger motion, and (ii) a generative learning-based pipeline for in-finger pose estimation. Experimental results demonstrate the ability of ActiveSPN to execute multi-degree-of-freedom in-finger manipulations, including two-axis rotation and one-axis translation. Moreover, the integrated vision-based pose estimation provides robust, real-time predictions, supporting consistent closed-loop control. Across diverse objects, the system achieves mean translational errors of 2.59 mm and rotational errors of 7 degrees, highlighting a promising paradigm for compact, efficient, and dexterous robotic manipulation. Codes are available at https://github.com/ancorasir/ActiveSPN.},
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L)},
  pubstate  = {published},
  tppubtype = {article}
}
}
Rongzheng Zhang, Wanghongjie Qiu, Jianuo Qiu, Yuqin Guo, Chengxiao Dong, Tuo Zhang, Juan Yi, Chaoyang Song, Harry Asada, Fang Wan
Multimodal Intention Recognition Combining Head Motion and Throat Vibration for Underwater Superlimbs Journal Article
In: IEEE Transactions on Automation Science and Engineering, vol. 0, no. 0, pp. 0, 2025.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE Trans. Autom. Sci. Eng. (T-ASE)
@article{Zhang2024MultiModal,
  title     = {Multimodal Intention Recognition Combining Head Motion and Throat Vibration for Underwater Superlimbs},
  author    = {Rongzheng Zhang and Wanghongjie Qiu and Jianuo Qiu and Yuqin Guo and Chengxiao Dong and Tuo Zhang and Juan Yi and Chaoyang Song and Harry Asada and Fang Wan},
  doi       = {10.1109/TASE.2025.3554036},
  journal   = {IEEE Transactions on Automation Science and Engineering},
  year      = {2025},
  date      = {2025-03-20},
  urldate   = {2025-03-20},
  abstract  = {This paper presents a novel solution for underwater intention recognition that simultaneously detects head motion and throat vibration, enhancing multimodal human-robot interactions for underwater diving. The system pairs with an underwater supernumerary robotic limb (SuperLimb), providing propulsion assistance to reduce the diver's physical load and mental fatigue. An inertial measurement unit monitors head motion, while a throat microphone captures vocal vibrations. Learning algorithms process these signals to accurately interpret the diver's intentions and map them to the SuperLimb for posture management. The system features a compact design optimized for diving scenarios and includes a multimodal, real-time classification algorithm to distinguish various head motions and vocal signals. By collecting and analyzing underwater throat vibration data, the study demonstrates the feasibility of this approach, enabling continuous motion commands for enhanced diving assistance. The results show that the head motion recognition component of the system achieved a high classification accuracy of 95%, and throat vibration classification reached 86% accuracy on land and 89% underwater for various purposes.},
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - IEEE Trans. Autom. Sci. Eng. (T-ASE)},
  pubstate  = {published},
  tppubtype = {article}
}
}
Xudong Han, Ning Guo, Yu Jie, He Wang, Fang Wan, Chaoyang Song
On Flange-Based 3D Hand-Eye Calibration for Soft Robotic Tactile Welding Journal Article
In: Measurement, vol. 238, iss. October, pp. 115376, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Measurement (MEAS)
@article{Han2024OnFlange,
  title     = {On Flange-Based {3D} Hand-Eye Calibration for Soft Robotic Tactile Welding},
  author    = {Xudong Han and Ning Guo and Yu Jie and He Wang and Fang Wan and Chaoyang Song},
  doi       = {10.1016/j.measurement.2024.115376},
  journal   = {Measurement},
  volume    = {238},
  issue     = {October},
  pages     = {115376},
  year      = {2024},
  date      = {2024-10-01},
  urldate   = {2024-10-01},
  abstract  = {This paper investigates the direct application of standardized designs on the robot for conducting robot hand–eye calibration by employing 3D scanners with collaborative robots. The well-established geometric features of the robot flange are exploited by directly capturing its point cloud data. In particular, an iterative method is proposed to facilitate point cloud processing towards a refined calibration outcome. Several extensive experiments are conducted over a range of collaborative robots, including Universal Robots UR5 \& UR10 e-series, Franka Emika, and AUBO i5 using an industrial-grade 3D scanner Photoneo Phoxi S \& M and a commercial-grade 3D scanner Microsoft Azure Kinect DK. Experimental results show that translational and rotational errors converge efficiently to less than 0.28 mm and 0.25 degrees, respectively, achieving a hand–eye calibration accuracy as high as the camera’s resolution, probing the hardware limit. A welding seam tracking system is presented, combining the flange-based calibration method with soft tactile sensing. The experiment results show that the system enables the robot to adjust its motion in real-time, ensuring consistent weld quality and paving the way for more efficient and adaptable manufacturing processes.},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Measurement (MEAS)},
  pubstate  = {published},
  tppubtype = {article}
}
Ning Guo, Xudong Han, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Fang Wan, Chaoyang Song
Reconstructing Soft Robotic Touch via In-Finger Vision Journal Article
In: Advanced Intelligent Systems, vol. 6, iss. October, no. 10, pp. 2400022, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Guo2024ReconstructingSoft,
  title     = {Reconstructing Soft Robotic Touch via In-Finger Vision},
  author    = {Ning Guo and Xudong Han and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Fang Wan and Chaoyang Song},
  journal   = {Advanced Intelligent Systems},
  volume    = {6},
  number    = {10},
  issue     = {October},
  pages     = {2400022},
  doi       = {10.1002/aisy.202400022},
  year      = {2024},
  date      = {2024-10-01},
  urldate   = {2024-10-01},
  abstract  = {Incorporating authentic tactile interactions into virtual environments presents a notable challenge for the emerging development of soft robotic metamaterials. In this study, a vision-based approach is introduced to learning proprioceptive interactions by simultaneously reconstructing the shape and touch of a soft robotic metamaterial (SRM) during physical engagements. The SRM design is optimized to the size of a finger with enhanced adaptability in 3D interactions while incorporating a see-through viewing field inside, which can be visually captured by a miniature camera underneath to provide a rich set of image features for touch digitization. Employing constrained geometric optimization, the proprioceptive process with aggregated multi-handles is modeled. This approach facilitates real-time, precise, and realistic estimations of the finger's mesh deformation within a virtual environment. Herein, a data-driven learning model is also proposed to estimate touch positions, achieving reliable results with impressive R2 scores of 0.9681, 0.9415, and 0.9541 along the x, y, and z axes. Furthermore, the robust performance of the proposed methods in touch-based human–cybernetic interfaces and human–robot collaborative grasping is demonstrated. In this study, the door is opened to future applications in touch-based digital twin interactions through vision-based soft proprioception.},
  keywords  = {Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
  pubstate  = {published},
  tppubtype = {article}
}
Ning Guo, Xudong Han, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Jiansheng Dai, Fang Wan, Chaoyang Song
Proprioceptive State Estimation for Amphibious Tactile Sensing Journal Article
In: IEEE Transactions on Robotics, vol. 40, iss. September, pp. 4684-4698, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)
@article{Guo2024ProprioceptiveState,
  title     = {Proprioceptive State Estimation for Amphibious Tactile Sensing},
  author    = {Ning Guo and Xudong Han and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Jiansheng Dai and Fang Wan and Chaoyang Song},
  journal   = {IEEE Transactions on Robotics},
  volume    = {40},
  issue     = {September},
  pages     = {4684-4698},
  doi       = {10.1109/TRO.2024.3463509},
  year      = {2024},
  date      = {2024-09-18},
  urldate   = {2024-09-18},
  abstract  = {This article presents a novel vision-based proprioception approach for a soft robotic finger that can estimate and reconstruct tactile interactions in terrestrial and aquatic environments. The key to this system lies in the finger's unique metamaterial structure, which facilitates omnidirectional passive adaptation during grasping, protecting delicate objects across diverse scenarios. A compact in-finger camera captures high-framerate images of the finger's deformation during contact, extracting crucial tactile data in real time. We present a volumetric discretized model of the soft finger and use the geometry constraints captured by the camera to find the optimal estimation of the deformed shape. The approach is benchmarked using a motion capture system with sparse markers and a haptic device with dense measurements. Both results show state-of-the-art accuracy, with a median error of 1.96 mm for overall body deformation, corresponding to 2.1 % of the finger's length. More importantly, the state estimation is robust in both on-land and underwater environments as we demonstrate its usage for underwater object shape sensing. This combination of passive adaptation and real-time tactile sensing paves the way for amphibious robotic grasping applications.},
  key       = {2024-J-TRO-ProprioceptiveState},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)},
  pubstate  = {published},
  tppubtype = {article}
}
Tianyu Wu, Yujian Dong, Xiaobo Liu, Xudong Han, Yang Xiao, Jinqi Wei, Fang Wan, Chaoyang Song
Vision-based Tactile Intelligence with Soft Robotic Metamaterial Journal Article
In: Materials & Design, vol. 238, iss. February, pp. 112629, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Mat. Des. (MADE)
@article{Wu2024VisionBasedSRM,
  title     = {Vision-based Tactile Intelligence with Soft Robotic Metamaterial},
  author    = {Tianyu Wu and Yujian Dong and Xiaobo Liu and Xudong Han and Yang Xiao and Jinqi Wei and Fang Wan and Chaoyang Song},
  doi       = {10.1016/j.matdes.2024.112629},
  journal   = {Materials & Design},
  volume    = {238},
  issue     = {February},
  pages     = {112629},
  year      = {2024},
  date      = {2024-02-01},
  urldate   = {2024-02-01},
  abstract  = {Robotic metamaterials represent an innovative approach to creating synthetic structures that combine desired material characteristics with embodied intelligence, blurring the boundaries between materials and machinery. Inspired by the functional qualities of biological skin, integrating tactile intelligence into these materials has gained significant interest for research and practical applications. This study introduces a Soft Robotic Metamaterial (SRM) design featuring omnidirectional adaptability and superior tactile sensing, combining vision-based motion tracking and machine learning. The study compares two sensory integration methods to a state-of-the-art motion tracking system and force/torque sensor baseline: an internal-vision design with high frame rates and an external-vision design offering cost-effectiveness. The results demonstrate the internal-vision SRM design achieving an impressive tactile accuracy of 98.96%, enabling soft and adaptive tactile interactions, especially beneficial for dexterous robotic grasping. The external-vision design offers similar performance at a reduced cost and can be adapted for portability, enhancing material science education and robotic learning. This research significantly advances tactile sensing using vision-based motion tracking in soft robotic metamaterials, and the open-source availability on GitHub fosters collaboration and further exploration of this innovative technology (https://github.com/bionicdl-sustech/SoftRoboticTongs).},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Mat. Des. (MADE)},
  pubstate  = {published},
  tppubtype = {article}
}
Ning Guo, Xudong Han, Xiaobo Liu, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Jiansheng Dai, Fang Wan, Chaoyang Song
Autoencoding a Soft Touch to Learn Grasping from On-land to Underwater Journal Article
In: Advanced Intelligent Systems, vol. 6, iss. January, no. 1, pp. 2300382, 2024.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Guo2024AutoencodingA,
title = {Autoencoding a Soft Touch to Learn Grasping from On-land to Underwater},
author = {Ning Guo and Xudong Han and Xiaobo Liu and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Jiansheng Dai and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202300382},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Advanced Intelligent Systems},
volume = {6},
number = {1},
issue = {January},
pages = {2300382},
abstract = {Robots play a critical role as the physical agent of human operators in exploring the ocean. However, it remains challenging to grasp objects reliably while fully submerging under a highly pressurized aquatic environment with little visible light, mainly due to the fluidic interference on the tactile mechanics between the finger and object surfaces. This study investigates the transferability of grasping knowledge from on-land to underwater via a vision-based soft robotic finger that learns 6D forces and torques (FT) using a supervised variational autoencoder (SVAE). A high-framerate camera captures the whole-body deformations while a soft robotic finger interacts with physical objects on-land and underwater. Results show that the trained SVAE model learns a series of latent representations of the soft mechanics transferable from land to water, presenting a superior adaptation to the changing environments against commercial FT sensors. Soft, delicate, and reactive grasping enabled by tactile intelligence enhances the gripper's underwater interaction with improved reliability and robustness at a much-reduced cost, paving the path for learning-based intelligent grasping to support fundamental scientific discoveries in environmental and ocean research.},
keywords = {Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
Yu Pan, Xuanyi Dai, Fang Wan, Chaoyang Song, James KH Tsoi, Edmond HN Pow
A Novel Post-Processing Strategy to Improve the Accuracy of Complete-Arch Intraoral Scanning for Implants: An In Vitro Study Journal Article
In: Journal of Dentistry, vol. 139, iss. December, pp. 104761, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - J. Dent. (JoD)
@article{Pan2023ANovel,
  title     = {A Novel Post-Processing Strategy to Improve the Accuracy of Complete-Arch Intraoral Scanning for Implants: An In Vitro Study},
  author    = {Yu Pan and Xuanyi Dai and Fang Wan and Chaoyang Song and James KH Tsoi and Edmond HN Pow},
  journal   = {Journal of Dentistry},
  volume    = {139},
  issue     = {December},
  pages     = {104761},
  year      = {2023},
  date      = {2023-12-01},
  urldate   = {2023-12-01},
  doi       = {10.1016/j.jdent.2023.104761},
  abstract  = {[Objectives] To develop a new post-processing strategy that utilizes an auxiliary device to adjust intraoral scans and improve the accuracy of 3D models of complete-arch dental implants.
[Materials and methods] An edentulous resin model with 6 dental implants was prepared. An auxiliary device, consisting of an opaque base and artificial landmarks, was fabricated and mounted onto the resin model. Twenty intraoral scans (raw scans) were taken using this setup. A new post-processing strategy was proposed to adjust the raw scans using reverse engineering software (verified group). Additionally, ten conventional gypsum casts were duplicated and digitized using a laboratory scanner. The linear and angular trueness and precision of the models were evaluated and compared. The effect of the proposed strategy on the accuracy of complete-arch intraoral scans was analyzed using one-way ANOVA.
[Results] The linear trueness (29.7 µm) and precision (24.8 µm) of the verified group were significantly better than the raw scans (46.6 µm, 44.7 µm) and conventional casts (51.3 µm, 36.5 µm), particularly in cross-arch sites. However, the angular trueness (0.114°) and precision (0.085°) of the conventional casts were significantly better than both the verified models (0.298°, 0.168°) and the raw scans (0.288°, 0.202°).
[Conclusions] The novel post-processing strategy is effective in enhancing the linear accuracy of complete-arch implant IO scans, especially in cross-arch sites. However, further improvement is needed to eliminate the angular deviations.
[Clinical significance] Errors generated from intraoral scanning in complete edentulous arches exceed the clinical threshold. The elimination of stitching errors in the raw scans particularly in the cross-arch sites, through the proposed post-processing strategy would enhance the accuracy of complete-arch implant prostheses.},
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - J. Dent. (JoD)},
  pubstate  = {published},
  tppubtype = {article}
}
[Materials and methods] An edentulous resin model with 6 dental implants was prepared. An auxiliary device, consisting of an opaque base and artificial landmarks, was fabricated and mounted onto the resin model. Twenty intraoral scans (raw scans) were taken using this setup. A new post-processing strategy was proposed to adjust the raw scans using reverse engineering software (verified group). Additionally, ten conventional gypsum casts were duplicated and digitized using a laboratory scanner. The linear and angular trueness and precision of the models were evaluated and compared. The effect of the proposed strategy on the accuracy of complete-arch intraoral scans was analyzed using one-way ANOVA.
[Results] The linear trueness (29.7 µm) and precision (24.8 µm) of the verified group were significantly better than the raw scans (46.6 µm, 44.7 µm) and conventional casts (51.3 µm, 36.5 µm), particularly in cross-arch sites. However, the angular trueness (0.114°) and precision (0.085°) of the conventional casts were significantly better than both the verified models (0.298°, 0.168°) and the raw scans (0.288°, 0.202°).
[Conclusions] The novel post-processing strategy is effective in enhancing the linear accuracy of complete-arch implant IO scans, especially in cross-arch sites. However, further improvement is needed to eliminate the angular deviations.
[Clinical significance] Errors generated from intraoral scanning in complete edentulous arches exceed the clinical threshold. The elimination of stitching errors in the raw scans particularly in the cross-arch sites, through the proposed post-processing strategy would enhance the accuracy of complete-arch implant prostheses.
Xiaobo Liu, Xudong Han, Ning Guo, Fang Wan, Chaoyang Song
Bio-inspired Proprioceptive Touch of a Soft Finger with Inner-Finger Kinesthetic Perception Journal Article
In: Biomimetics, vol. 8, no. 6, pp. 501, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)
@article{Liu2023BioInspired,
  title     = {Bio-inspired Proprioceptive Touch of a Soft Finger with Inner-Finger Kinesthetic Perception},
  author    = {Xiaobo Liu and Xudong Han and Ning Guo and Fang Wan and Chaoyang Song},
  journal   = {Biomimetics},
  volume    = {8},
  number    = {6},
  pages     = {501},
  year      = {2023},
  date      = {2023-10-21},
  urldate   = {2023-10-21},
  doi       = {10.3390/biomimetics8060501},
  abstract  = {In-hand object pose estimation is challenging for humans and robots due to occlusion caused by the hand and object. This paper proposes a soft finger that integrates inner vision with kinesthetic sensing to estimate object pose inspired by human fingers. The soft finger has a flexible skeleton and skin that adapts to different objects, and the skeleton deformations during interaction provide contact information obtained by the image from the inner camera. The proposed framework is an end-to-end method that uses raw images from soft fingers to estimate in-hand object pose. It consists of an encoder for kinesthetic information processing and an object pose and category estimator. The framework was tested on seven objects, achieving an impressive error of 2.02 mm and 11.34 degrees for pose error and 99.05% for classification.},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)},
  pubstate  = {published},
  tppubtype = {article}
}
Yuping Gu, Ziqian Wang, Shihao Feng, Haoran Sun, Haibo Lu, Jia Pan, Fang Wan, Chaoyang Song
Computational Design Towards Energy Efficient Optimization in Overconstrained Robotic Limbs Journal Article
In: Journal of Computational Design and Engineering, vol. 10, iss. October, no. 5, pp. 1941–1956, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Editor's Choice, JCR Q1, Jour - J. Comput. Des. Eng. (JCDE)
@article{Gu2023ComputationalDesign,
title = {Computational Design Towards Energy Efficient Optimization in Overconstrained Robotic Limbs},
author = {Yuping Gu and Ziqian Wang and Shihao Feng and Haoran Sun and Haibo Lu and Jia Pan and Fang Wan and Chaoyang Song},
doi = {10.1093/jcde/qwad083},
year = {2023},
date = {2023-08-22},
urldate = {2023-08-22},
journal = {Journal of Computational Design and Engineering},
volume = {10},
number = {5},
issue = {October},
pages = {1941--1956},
abstract = {Legged robots are constantly evolving, and energy efficiency is a major driving factor in their design. However, combining mechanism efficiency and trajectory planning can be challenging. This work proposes a computational optimization framework for optimizing leg design during basic walking while maximizing energy efficiency. We generalize the robotic limb design as a four-bar linkage-based design pool and optimize the leg using an evolutionary algorithm. The leg configuration and design parameters are optimized based on user-defined objective functions. Our framework was validated by comparing it to measured data on our prototype quadruped robot for forward trotting. The Bennett robotic leg was advantageous for omni-directional locomotion with enhanced energy efficiency.},
keywords = {Authorship - Corresponding, Award - Editor's Choice, JCR Q1, Jour - J. Comput. Des. Eng. (JCDE)},
pubstate = {published},
tppubtype = {article}
}
Jiayu Huo, Jingran Wang, Yuqin Guo, Wanghongjie Qiu, Mingdong Chen, Harry Asada, Fang Wan, Chaoyang Song
Reconfigurable Design and Modeling of an Underwater Superlimb for Diving Assistance Journal Article
In: Advanced Intelligent Systems, vol. 5, iss. November, no. 11, pp. 2300245, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Award - Back Cover, Award - Editor's Choice, JCR Q1, Jour - Adv. Intell. Syst. (AIS)
@article{Huo2023ReconfigurableDesign,
  title     = {Reconfigurable Design and Modeling of an Underwater Superlimb for Diving Assistance},
  author    = {Jiayu Huo and Jingran Wang and Yuqin Guo and Wanghongjie Qiu and Mingdong Chen and Harry Asada and Fang Wan and Chaoyang Song},
  journal   = {Advanced Intelligent Systems},
  volume    = {5},
  number    = {11},
  issue     = {November},
  pages     = {2300245},
  year      = {2023},
  date      = {2023-08-17},
  urldate   = {2023-08-17},
  doi       = {10.1002/aisy.202300245},
  abstract  = {This study presents the design of an underwater superlimb as a wearable robot, providing divers with mobility assistance and freeing their hands for manipulating tools underwater. The wearable design features a thrust vectoring system with two 3D-printed, waterproofed modules. The module with adjustable connections and strapping holes is designed to enable reconfiguration for multiple purposes, including regular use as an underwater superlimb for divers, manually operated as a handheld glider for swimmers, combined with an amphibian, legged robot as a quadruped superlimb, and coupled as a dual-unit autonomous underwater vehicle for underwater navigation. The kinematics and dynamics of the prototype and all of its reconfigured modes are developed. A sliding-mode controller is also introduced to achieve stable simulation in PyBullet. Field tests further support the feasibility of the underwater superlimb when worn on a test diver in a swimming pool. As the first underwater superlimb presented in the literature, this study opens new doors for supernumerary robotic limbs in underwater scenarios with multifunctional reconfiguration.},
  keywords  = {Authorship - Corresponding, Award - Back Cover, Award - Editor's Choice, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
  pubstate  = {published},
  tppubtype = {article}
}
Haoran Sun, Linhan Yang, Yuping Gu, Jia Pan, Fang Wan, Chaoyang Song
Bridging Locomotion and Manipulation Using Reconfigurable Robotic Limbs via Reinforcement Learning Journal Article
In: Biomimetics, vol. 8, no. 4, pp. 364, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)
@article{Sun2023BridgingLocomotion,
  title     = {Bridging Locomotion and Manipulation Using Reconfigurable Robotic Limbs via Reinforcement Learning},
  author    = {Haoran Sun and Linhan Yang and Yuping Gu and Jia Pan and Fang Wan and Chaoyang Song},
  journal   = {Biomimetics},
  volume    = {8},
  number    = {4},
  pages     = {364},
  year      = {2023},
  date      = {2023-08-14},
  urldate   = {2023-08-14},
  doi       = {10.3390/biomimetics8040364},
  abstract  = {Locomotion and manipulation are two essential skills in robotics but are often divided or decoupled into two separate problems. It is widely accepted that the topological duality between multi-legged locomotion and multi-fingered manipulation shares an intrinsic model. However, a lack of research remains to identify the data-driven evidence for further research. This paper explores a unified formulation of the loco-manipulation problem using reinforcement learning (RL) by reconfiguring robotic limbs with an overconstrained design into multi-legged and multi-fingered robots. Such design reconfiguration allows for adopting a co-training architecture for reinforcement learning towards a unified loco-manipulation policy. As a result, we find data-driven evidence to support the transferability between locomotion and manipulation skills using a single RL policy with a multilayer perceptron or graph neural network. We also demonstrate the Sim2Real transfer of the learned loco-manipulation skills in a robotic prototype. This work expands the knowledge frontiers on loco-manipulation transferability with learning-based evidence applied in a novel platform with overconstrained robotic limbs.},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Biomimetics (Biomimetics)},
  pubstate  = {published},
  tppubtype = {article}
}
Linhan Yang, Bidan Huang, Qingbiao Li, Ya-Yen Tsai, Wang Wei Lee, Chaoyang Song, Jia Pan
TacGNN: Learning Tactile-based In-hand Manipulation with a Blind Robot using Hierarchical Graph Neural Network Journal Article
In: IEEE Robotics and Automation Letters, vol. 8, iss. June, no. 6, pp. 3605-3612, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2023TacGNN,
title = {TacGNN: Learning Tactile-based In-hand Manipulation with a Blind Robot using Hierarchical Graph Neural Network},
author = {Linhan Yang and Bidan Huang and Qingbiao Li and Ya-Yen Tsai and Wang Wei Lee and Chaoyang Song and Jia Pan},
doi = {10.1109/LRA.2023.3264759},
year = {2023},
date = {2023-04-05},
urldate = {2023-04-05},
journal = {IEEE Robotics and Automation Letters},
volume = {8},
number = {6},
issue = {June},
pages = {3605--3612},
abstract = {In this letter, we propose a novel framework for tactile-based dexterous manipulation learning with a blind anthropomorphic robotic hand, i.e. without visual sensing. First, object-related states were extracted from the raw tactile signals by a graph-based perception model - TacGNN. The resulting tactile features were then utilized in the policy learning of an in-hand manipulation task in the second stage. This method was examined by a Baoding ball task - simultaneously manipulating two spheres around each other by 180 degrees in hand. We conducted experiments on object states prediction and in-hand manipulation using a reinforcement learning algorithm (PPO). Results show that TacGNN is effective in predicting object-related states during manipulation by decreasing the RMSE of prediction to 0.096 cm comparing to other methods, such as MLP, CNN, and GCN. Finally, the robot hand could finish an in-hand manipulation task solely relying on the robotic own perception - tactile sensing and proprioception. In addition, our methods are tested on three tasks with different difficulty levels and transferred to the real robot without further training.},
keywords = {Authorship - Co-Author, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Hao Tian, Chaoyang Song, Changbo Wang, Xinyu Zhang, Jia Pan
Sampling-Based Planning for Retrieving Near-Cylindrical Objects in Cluttered Scenes Using Hierarchical Graphs Journal Article
In: IEEE Transactions on Robotics, vol. 39, iss. February, no. 1, pp. 165-182, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)
@article{Tian2023SamplingBased,
title = {Sampling-Based Planning for Retrieving Near-Cylindrical Objects in Cluttered Scenes Using Hierarchical Graphs},
author = {Hao Tian and Chaoyang Song and Changbo Wang and Xinyu Zhang and Jia Pan},
doi = {10.1109/TRO.2022.3191596},
year = {2023},
date = {2023-02-01},
urldate = {2023-02-01},
journal = {IEEE Transactions on Robotics},
volume = {39},
number = {1},
issue = {February},
pages = {165--182},
abstract = {We present an incremental sampling-based task and motion planner for retrieving near-cylindrical objects, like bottle, in cluttered scenes, which computes a plan for removing obstacles to generate a collision-free motion of a robot to retrieve the target object. Our proposed planner uses a two-level hierarchy, including the first-level roadmap for the target object motion and the second-level retrieval graph for the entire robot motion, to aid in deciding the order and trajectory of object removal. We use an incremental expansion strategy to update the roadmap and retrieval graph from the collisions between the target object, the robot, and the obstacles, in order to optimize the object removal sequence. The performance of our method is highlighted in several benchmark scenes, including a fixed robotic arm in a cluttered scene with known obstacle locations and a scene, where locations of some objects or even the target object are unknown due to occlusions. Our method can also efficiently solve the high-dimensional planning problem of object retrieval using a mobile manipulator and be combined with the symbolic planner to plan complex multistep tasks. We deploy our method to a physical robot and integrate it with nonprehensile actions to improve operational efficiency. Compared to the state-of-the-art approaches, our method reduces task and motion planning time up to 24.6% with a higher success rate, and still provides a near-optimal plan.},
keywords = {Authorship - Co-Author, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)},
pubstate = {published},
tppubtype = {article}
}
Yuping Gu, Shihao Feng, Yuqin Guo, Fang Wan, Jiansheng Dai, Jia Pan, Chaoyang Song
Overconstrained Coaxial Design of Robotic Legs with Omni-directional Locomotion Journal Article
In: Mechanism and Machine Theory, vol. 176, iss. October, pp. 105018, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - Mech. Mach. Theory (MMT)
@article{Gu2022OverconstrainedCoaxial,
  title     = {Overconstrained Coaxial Design of Robotic Legs with Omni-directional Locomotion},
  author    = {Yuping Gu and Shihao Feng and Yuqin Guo and Fang Wan and Jiansheng Dai and Jia Pan and Chaoyang Song},
  journal   = {Mechanism and Machine Theory},
  volume    = {176},
  issue     = {October},
  pages     = {105018},
  year      = {2022},
  date      = {2022-10-01},
  urldate   = {2022-10-01},
  doi       = {10.1016/j.mechmachtheory.2022.105018},
  abstract  = {While being extensively researched in literature, overconstrained linkages’ engineering potential is yet to be explored. This study investigates the design of overconstrained linkages as robotic legs with coaxial actuation starting with the simplest case, Bennett linkage, to establish the theoretical foundations and engineering advantages of a class of overconstrained robots. We proposed a parametric design of the spatial links and joints in alternative forms so that one can fabricate these overconstrained limbs via 3D printing and then attach the linkage coaxially to a pair of servo actuators as a reconfigurable leg module. We adopted multi-objective optimization to refine the design parameters by analyzing its manipulability metric and force transmission, enabling omni-directional ground locomotion projected from a three-dimensional surface workspace. The proposed prototype quadruped was capable of omni-directional locomotion and had a minimal turning radius (0.2 Body Length) using the fewest actuators. We further explored the kinematics and design potentials to generalize the proposed method for all overconstrained 5R and 6R linkages, paving the path for a future direction in overconstrained robotics.},
  keywords  = {Authorship - Corresponding, JCR Q1, Jour - Mech. Mach. Theory (MMT)},
  pubstate  = {published},
  tppubtype = {article}
}
Chaoyang Song, Jianxi Luo, Katja Hölttä-Otto, Warren Seering, Kevin Otto
Crowdfunding for Design Innovation: Prediction Model with Critical Factors Journal Article
In: IEEE Transactions on Engineering Management, vol. 69, iss. August, no. 4, pp. 1565-1576, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Authorship - First Author, JCR Q1, Jour - IEEE Trans. Eng. Manag. (TEM)
@article{Song2022CrowdfunndingFor,
title = {Crowdfunding for Design Innovation: Prediction Model with Critical Factors},
author = {Chaoyang Song and Jianxi Luo and Katja Hölttä-Otto and Warren Seering and Kevin Otto},
doi = {10.1109/tem.2020.3001764},
year = {2022},
date = {2022-08-01},
urldate = {2022-08-01},
journal = {IEEE Transactions on Engineering Management},
volume = {69},
number = {4},
issue = {August},
pages = {1565--1576},
abstract = {Online reward-based crowdfunding campaigns have emerged as an innovative approach for validating demands, discovering early adopters, and seeking learning and feedback in the design processes of innovative products. However, crowdfunding campaigns for innovative products are faced with a high degree of uncertainty and suffer meager rates of success to fulfill their values for design. To guide designers and innovators for crowdfunding campaigns, this article presents a data-driven methodology to build a prediction model with critical factors for crowdfunding success, based on public online crowdfunding campaign data. Specifically, the methodology filters 26 candidate factors in the real-win-worth framework and identifies the critical ones via stepwise regression to predict the amount of crowdfunding. We demonstrate the methods via deriving prediction models and identifying essential factors from three-dimensional printer and smartwatch campaign data on Kickstarter and Indiegogo. The critical factors can guide campaign developments, and the prediction model may evaluate crowdfunding potential of innovations in contexts, to increase the chance of crowdfunding success of innovative products.},
keywords = {Authorship - Corresponding, Authorship - First Author, JCR Q1, Jour - IEEE Trans. Eng. Manag. (TEM)},
pubstate = {published},
tppubtype = {article}
}
Youcan Yan, Yajing Shen, Chaoyang Song, Jia Pan
Tactile Super-Resolution Model for Soft Magnetic Skin Journal Article
In: IEEE Robotics and Automation Letters, vol. 7, iss. April, no. 2, pp. 2589-2596, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yan2022TactileSuper,
title = {Tactile Super-Resolution Model for Soft Magnetic Skin},
author = {Youcan Yan and Yajing Shen and Chaoyang Song and Jia Pan},
doi = {10.1109/LRA.2022.3141449},
year = {2022},
date = {2022-01-10},
urldate = {2022-01-10},
journal = {IEEE Robotics and Automation Letters},
volume = {7},
number = {2},
issue = {April},
pages = {2589--2596},
abstract = {Tactile sensors of high spatial resolution can provide rich contact information in terms of accurate contact location and force magnitude for robots. However, achieving a high spatial resolution normally requires a high density of tactile sensing cells (or taxels), which will inevitably lead to crowded wire connections, more data acquisition time and probably crosstalk between taxels. An alternative approach to improve the spatial resolution without introducing a high density of taxels is employing super-resolution technology. Here, we propose a novel tactile super-resolution method based on a sinusoidally magnetized soft magnetic skin, by which we have achieved a 15-fold improvement of localization accuracy (from 6 mm to 0.4 mm) as well as the ability to measure the force magnitude. Different from the existing super-resolution methods that rely on overlapping signals of neighbouring taxels, our model only relies on the local information from a single 3-axis taxel and thereby can detect multipoint contact applied on neighboring taxels and work properly even when some of the neighbouring taxels near the contact position are damaged (or unavailable). With this property, our method would be robust to damage and could potentially benefit robotic applications that require multipoint contact detection.},
keywords = {Authorship - Co-Author, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Sicong Liu, Yuming Zhu, Zicong Zhang, Zhonggui Fang, Jiyong Tan, Jing Peng, Chaoyang Song, Harry Asada, Zheng Wang
Otariidae-Inspired Soft-Robotic Supernumerary Flippers by Fabric Kirigami and Origami Journal Article
In: IEEE/ASME Transactions on Mechatronics, vol. 26, iss. October, no. 5, pp. 2747-2757, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE ASME Trans. Mechatron. (TMech)
@article{Liu2021OtariidaeInspired,
title = {Otariidae-Inspired Soft-Robotic Supernumerary Flippers by Fabric Kirigami and Origami},
author = {Sicong Liu and Yuming Zhu and Zicong Zhang and Zhonggui Fang and Jiyong Tan and Jing Peng and Chaoyang Song and Harry Asada and Zheng Wang},
doi = {10.1109/TMECH.2020.3045476},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
journal = {IEEE/ASME Transactions on Mechatronics},
volume = {26},
number = {5},
issue = {October},
pages = {2747--2757},
abstract = {Wearable robotic devices are receiving rapidly growing attentions for human-centered scenarios from medical, rehabilitation, to industrial applications. Supernumerary robotic limbs have been widely investigated for the augmentation of human limb functions, both as fingers and manipulator arms. Soft robotics offers an alternative approach to conventional motor-driven robot limbs toward safer and lighter systems, while pioneering soft supernumerary limbs are strongly limited in payload and dexterity by the soft robotic design approach, as well as the fabrication techniques. In this article, we proposed a wearable supernumerary soft robot for the human forearm, inspired by the fore flippers of otariids (eared seals). A flat flipper design was adopted, differing from the finger- or arm-shaped state-of-the-art works, with multiple soft actuators embedded as different joints for manipulation dexterity. The soft actuators were designed following origami (paper folding) patterns, reinforced by kirigami (paper cutting) fabrics. With this new approach, the proposed soft flipper incorporated eight independent muscles, achieving over 20 times payload to self-weight ratio, while weighing less than 500 g. The versatility, dexterity, and payload capability were experimentally demonstrated using a fabricated prototype with proprietary actuation and control. This article demonstrates the feasibility and unique advantages of origami + kirigami soft robots as a new approach to strong, dexterous, and yet safe and lightweight wearable robotic devices.},
keywords = {Authorship - Co-Author, JCR Q1, Jour - IEEE ASME Trans. Mechatron. (TMech)},
pubstate = {published},
tppubtype = {article}
}
Baiyue Wang, Weijie Guo, Shihao Feng, Hongdong Yi, Fang Wan, Chaoyang Song
Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing Journal Article
In: IEEE Robotics and Automation Letters, vol. 6, iss. July, no. 3, pp. 5284-5291, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Wang2021VolumetricallyEnhanced,
title = {Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing},
author = {Baiyue Wang and Weijie Guo and Shihao Feng and Hongdong Yi and Fang Wan and Chaoyang Song},
doi = {10.1109/LRA.2021.3072859},
year = {2021},
date = {2021-07-01},
urldate = {2021-07-01},
journal = {IEEE Robotics and Automation Letters},
volume = {6},
number = {3},
issue = {July},
pages = {5284--5291},
abstract = {Soft robots often show a superior power-to-weight ratio using highly compliant, light-weight material, which leverages various bio-inspired body designs to generate desirable deformations for life-like motions. In this letter, given that most material used for soft robots is light-weight in general, we propose a volumetrically enhanced design strategy for soft robots, providing a novel design guideline to govern the form factor of soft robots. We present the design, modeling, and optimization of a volumetrically enhanced soft actuator (VESA) with linear and rotary motions, respectively, achieving superior force and torque output, linear and rotary displacement, and overall extension ratio per unit volume. We further explored VESA's proprioceptive sensing capability by validating the output force and torque through analytical modeling and experimental verification. Our results show that the volumetric metrics hold the potential to be used as a practical design guideline to optimize soft robots’ engineering performance.},
keywords = {Authorship - Corresponding, JCR Q1, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Linhan Yang, Xudong Han, Weijie Guo, Fang Wan, Jia Pan, Chaoyang Song
Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping Journal Article
In: IEEE Robotics and Automation Letters, vol. 6, iss. April, no. 2, pp. 3817-3824, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2021LearningBased,
title = {Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping},
author = {Linhan Yang and Xudong Han and Weijie Guo and Fang Wan and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2021.3065186},
year = {2021},
date = {2021-04-01},
urldate = {2021-04-01},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
journal = {IEEE Robotics and Automation Letters},
volume = {6},
number = {2},
issue = {April},
pages = {3817--3824},
address = {Xi’an, China},
abstract = {This letter presents a novel design of a soft tactile finger with omni-directional adaptation using multi-channel optical fibers for rigid-soft interactive grasping. Machine learning methods are used to train a model for real-time prediction of force, torque, and contact using the tactile data collected. We further integrated such fingers in a reconfigurable gripper design with three fingers so that the finger arrangement can be actively adjusted in real-time based on the tactile data collected during grasping, achieving the process of rigid-soft interactive grasping. Detailed sensor calibration and experimental results are also included to further validate the proposed design for enhanced grasping robustness. Video: https://www.youtube.com/watch?v=ynCfSA4FQnY.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Youcan Yan, Zhe Hu, Zhengbao Yang, Wenzhen Yuan, Chaoyang Song, Jia Pan, Yajing Shen
Soft Magnetic Skin for Super-Resolution Tactile Sensing with Force Self-Decoupling Journal Article
In: Science Robotics, vol. 6, no. 51, pp. eabc8801, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - Sci. Robot. (SciRob)
@article{Yan2021SoftMagnetic,
  title     = {Soft Magnetic Skin for Super-Resolution Tactile Sensing with Force Self-Decoupling},
  author    = {Youcan Yan and Zhe Hu and Zhengbao Yang and Wenzhen Yuan and Chaoyang Song and Jia Pan and Yajing Shen},
  journal   = {Science Robotics},
  volume    = {6},
  number    = {51},
  pages     = {eabc8801},
  year      = {2021},
  date      = {2021-02-24},
  urldate   = {2021-02-24},
  doi       = {10.1126/scirobotics.abc8801},
  abstract  = {Human skin can sense subtle changes of both normal and shear forces (i.e., self-decoupled) and perceive stimuli with finer resolution than the average spacing between mechanoreceptors (i.e., super-resolved). By contrast, existing tactile sensors for robotic applications are inferior, lacking accurate force decoupling and proper spatial resolution at the same time. Here, we present a soft tactile sensor with self-decoupling and super-resolution abilities by designing a sinusoidally magnetized flexible film (with the thickness ~0.5 millimeters), whose deformation can be detected by a Hall sensor according to the change of magnetic flux densities under external forces. The sensor can accurately measure the normal force and the shear force (demonstrated in one dimension) with a single unit and achieve a 60-fold super-resolved accuracy enhanced by deep learning. By mounting our sensor at the fingertip of a robotic gripper, we show that robots can accomplish challenging tasks such as stably grasping fragile objects under external disturbance and threading a needle via teleoperation. This research provides new insight into tactile sensor design and could be beneficial to various applications in robotics field, such as adaptive grasping, dexterous manipulation, and human-robot interaction.},
  keywords  = {Authorship - Co-Author, JCR Q1, Jour - Sci. Robot. (SciRob)},
  pubstate  = {published},
  tppubtype = {article}
}
Fang Wan, Haokun Wang, Jiyuan Wu, Yujia Liu, Sheng Ge, Chaoyang Song
A Reconfigurable Design for Omni-adaptive Grasp Learning Journal Article
In: IEEE Robotics and Automation Letters, vol. 5, iss. July, no. 3, pp. 4210-4217, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Wan2020AReconfigurable,
title = {A Reconfigurable Design for Omni-adaptive Grasp Learning},
author = {Fang Wan and Haokun Wang and Jiyuan Wu and Yujia Liu and Sheng Ge and Chaoyang Song},
doi = {10.1109/lra.2020.2982059},
year = {2020},
date = {2020-07-01},
urldate = {2020-07-01},
journal = {IEEE Robotics and Automation Letters},
volume = {5},
number = {3},
issue = {July},
pages = {4210--4217},
abstract = {The engineering design of robotic grippers presents an ample design space for optimization towards robust grasping. In this letter, we investigate how learning method can be used to support the design reconfiguration of robotic grippers for grasping using a novel soft structure with omni-directional adaptation. We propose a gripper system that is reconfigurable in terms of the number and arrangement of the proposed finger, which generates a large number of possible design configurations. Such design reconfigurations with omni-adaptive fingers enables us to systematically investigate the optimal arrangement of the fingers towards robust grasping. Furthermore, we adopt a learning-based method as the baseline to benchmark the effectiveness of each design configuration. As a result, we found that the 3-finger radial configuration is suitable for space-saving and cost-effectiveness, achieving an average 96% grasp success rate on seen and novel objects selected from the YCB dataset. The 4-finger radial arrangement can be applied to cases that require a higher payload with even distribution. We achieved dimension reduction using the radial gripper design with the removal of z-axis rotation during grasping. We also reported the different outcomes with or without friction enhancement of the soft finger network.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Linhan Yang, Fang Wan, Haokun Wang, Xiaobo Liu, Yujia Liu, Jia Pan, Chaoyang Song
Rigid-Soft Interactive Learning for Robust Grasping Journal Article
In: IEEE Robotics and Automation Letters, vol. 5, iss. April, no. 2, pp. 1720-1727, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2020RigidSoft,
title = {Rigid-Soft Interactive Learning for Robust Grasping},
author = {Linhan Yang and Fang Wan and Haokun Wang and Xiaobo Liu and Yujia Liu and Jia Pan and Chaoyang Song},
doi = {10.1109/lra.2020.2969932},
year = {2020},
date = {2020-04-01},
urldate = {2020-04-01},
journal = {IEEE Robotics and Automation Letters},
volume = {5},
number = {2},
issue = {April},
pages = {1720--1727},
note = {Presented at the IEEE International Conference on Robotics and Automation (ICRA), Paris, France},
abstract = {Robot learning is widely accepted by academia and industry with its potentials to transform autonomous robot control through machine learning. Inspired by widely used soft fingers on grasping, we propose a method of rigid-soft interactive learning, aiming at reducing the time of data collection. In this letter, we classify the interaction categories into Rigid-Rigid, Rigid-Soft, Soft-Rigid according to the interaction surface between grippers and target objects. We find experimental evidence that the interaction types between grippers and target objects play an essential role in the learning methods. We use soft, stuffed toys for training, instead of everyday objects, to reduce the integration complexity and computational burden. Although the stuffed toys are limited in reflecting the physics of finger-object interaction in real-life scenarios, we exploit such rigid-soft interaction by changing the gripper fingers to the soft ones when dealing with rigid, daily-life items such as the Yale-CMU-Berkeley (YCB) objects. With a small data collection of 5 K picking attempts in total, our results suggest that such Rigid-Soft and Soft-Rigid interactions are transferable. Moreover, the combination of such interactions shows better performance on the grasping test. We also explore the effect of the grasp type on the learning method by changing the gripper configurations. We achieve the best grasping performance at 97.5% for easy YCB objects and 81.3% for difficult YCB objects while using a precise grasp with a two-soft-finger gripper to collect training data and power grasp with a four-soft-finger gripper to test the grasp policy.},
keywords = {Authorship - Corresponding, JCR Q1, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Juan Yi, Xiaojiao Chen, Chaoyang Song, Jianshu Zhou, Yujia Liu, Sicong Liu, Zheng Wang
Customizable Three-Dimensional-Printed Origami Soft Robotic Joint with Effective Behavior Shaping for Safe Interactions Journal Article
In: IEEE Transactions on Robotics, vol. 35, iss. February, no. 1, pp. 114-123, 2019.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)
@article{Yi2019Customizable3D,
title = {Customizable Three-Dimensional-Printed Origami Soft Robotic Joint with Effective Behavior Shaping for Safe Interactions},
author = {Juan Yi and Xiaojiao Chen and Chaoyang Song and Jianshu Zhou and Yujia Liu and Sicong Liu and Zheng Wang},
doi = {10.1109/tro.2018.2871440},
year = {2019},
date = {2019-02-01},
urldate = {2019-02-01},
journal = {IEEE Transactions on Robotics},
volume = {35},
number = {1},
issue = {February},
pages = {114--123},
abstract = {Fast-growing interests in safe and effective robot–environment interactions stimulated global investigations on soft robotics. The inherent compliance of soft robots ensures promising safety features but drastically reduces force capability, thereby complicating system modeling and control. To tackle these limitations, a soft robotic joint with enhanced strength, servo performance, and impact behavior shaping is proposed in this paper, based on novel three-dimensional-printed soft origami rotary actuators. The complete workflow is presented from the concept of origami design and analytical modeling, joint design, fabrication, control, and validation experiments. The proposed approach facilitates a fully customizable joint design towards the desired force capability and motion range. Validation results from models and experiments using multiple fabricated prototypes proved the excellent performance linearity and superior force capability, with 18.5-N·m maximum torque under 180 kPa, and 300-g self-weight. The behavior shaping capability is achieved by a low-level joint-angle servo and a high-level variable-stiffness regulation; this significantly reduces the impact torque by 53% and ensures powerful and safe interactions. The comprehensive guidelines provide insightful references for soft robotic design for wider robotic applications.},
keywords = {Authorship - Co-Author, JCR Q1, Jour - IEEE Trans. Robot. (T-RO)},
pubstate = {published},
tppubtype = {article}
}
Juan Yi, Xiaojiao Chen, Chaoyang Song, Zheng Wang
Fiber-Reinforced Origamic Robotic Actuator Journal Article
In: Soft Robotics, vol. 5, no. 1, pp. 81-92, 2018.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q1, Jour - Soft Robot. (SORO)
@article{Yi2018FiberReinforced,
title = {Fiber-Reinforced Origamic Robotic Actuator},
author = {Juan Yi and Xiaojiao Chen and Chaoyang Song and Zheng Wang},
doi = {10.1089/soro.2016.0079},
year = {2018},
date = {2018-02-01},
urldate = {2018-02-01},
journal = {Soft Robotics},
volume = {5},
number = {1},
pages = {81--92},
abstract = {A novel pneumatic soft linear actuator Fiber-reinforced Origamic Robotic Actuator (FORA) is proposed with significant improvements on the popular McKibben-type actuators, offering nearly doubled motion range, substantially improved force profile, and significantly lower actuation pressure. The desirable feature set is made possible by a novel soft origamic chamber that expands radially while contracts axially when pressurized. Combining this new origamic chamber with a reinforcing fiber mesh, FORA generates very high traction force (over 150N) and very large contractile motion (over 50%) at very low input pressure (100 kPa). We developed quasi-static analytical models both to characterize the motion and forces and as guidelines for actuator design. Fabrication of FORA mostly involves consumer-grade three-dimensional (3D) printing. We provide a detailed list of materials and dimensions. Fabricated FORAs were tested on a dedicated platform against commercially available pneumatic artificial muscles from Shadow and Festo to showcase its superior performances and validate the analytical models with very good agreements. Finally, a robotic joint was developed driven by two antagonistic FORAs, to showcase the benefits of the performance improvements. With its simple structure, fully characterized mechanism, easy fabrication procedure, and highly desirable performance, FORA could be easily customized to application requirements and fabricated by anyone with access to a 3D printer. This will pave the way to the wider adaptation and application of soft robotic systems.},
keywords = {Authorship - Co-Author, JCR Q1, Jour - Soft Robot. (SORO)},
pubstate = {published},
tppubtype = {article}
}
Chaoyang Song, Huijuan Feng, Yan Chen, I-Ming Chen, Rongjie Kang
Reconfigurable Mechanism Generated from the Network of Bennett Linkages Journal Article
In: Mechanism and Machine Theory, vol. 88, iss. June, pp. 49-62, 2015.
Abstract | Links | BibTeX | Tags: Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)
@article{Song2015ReconfigurableMechanism,
title = {Reconfigurable Mechanism Generated from the Network of {Bennett} Linkages},
author = {Chaoyang Song and Huijuan Feng and Yan Chen and I-Ming Chen and Rongjie Kang},
doi = {10.1016/j.mechmachtheory.2015.02.003},
year = {2015},
date = {2015-06-01},
urldate = {2015-06-01},
journal = {Mechanism and Machine Theory},
volume = {88},
issue = {June},
pages = {49--62},
abstract = {A network of four Bennett linkages is proposed in this paper. Totally five types of overconstrained 5R and 6R linkages, including the generalized Goldberg 5R linkage, generalized variant of the L-shape Goldberg 6R linkage, Waldron's hybrid 6R linkage, isomerized case of the generalized L-shape Goldberg 6R linkage, and generalized Wohlhart's double-Goldberg 6R linkage, can be constructed by modifying this Bennett network. The 8R linkage formed by Bennett network serves as the basic mechanism to realise the reconfiguration among five types of overconstrained linkages by rigidifying some of the eight joints. The work also reveals the in-depth relationship among the Bennett-based linkages, which provides a substantial advancement in the design of reconfigurable mechanisms using overconstrained linkages.},
keywords = {Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)},
pubstate = {published},
tppubtype = {article}
}
Chaoyang Song, Yan Chen, I-Ming Chen
A 6R Linkage Reconfigurable between the Line-Symmetric Bricard Linkage and the Bennett Linkage Journal Article
In: Mechanism and Machine Theory, vol. 70, iss. December, pp. 278-292, 2013.
Abstract | Links | BibTeX | Tags: Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)
@article{Song2013A6R,
title = {A {6R} Linkage Reconfigurable between the Line-Symmetric {Bricard} Linkage and the {Bennett} Linkage},
author = {Chaoyang Song and Yan Chen and I-Ming Chen},
doi = {10.1016/j.mechmachtheory.2013.07.013},
year = {2013},
date = {2013-12-01},
urldate = {2013-12-01},
journal = {Mechanism and Machine Theory},
volume = {70},
issue = {December},
pages = {278--292},
abstract = {This paper explores the feasibility of constructing mechanisms reconfigurable between 6R and 4R overconstrained linkages. Spatial triangle and Bennett linkage are used as the building blocks to form the reconfigurable Bricard linkage. Due to the different directions of the joint axes, the Bennett linkage can be setup in either asymmetric or line-symmetric manners. Subsequently, two 6R linkages are constructed in asymmetric and line-symmetric configurations, respectively. Their potential of reconfiguration is investigated through bifurcation analysis. The result shows that the asymmetric one can be reconfigured between Bennett linkage and general line-symmetric Bricard linkage through bifurcation points, while the line-symmetric one only functions as a Bennett linkage with two additional fixed joints.},
keywords = {Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)},
pubstate = {published},
tppubtype = {article}
}
Chaoyang Song, Yan Chen
Multiple Linkage Forms and Bifurcation Behaviours of the Double-Subtractive-Goldberg 6R Linkage Journal Article
In: Mechanism and Machine Theory, vol. 57, iss. November, pp. 95-110, 2012.
Abstract | Links | BibTeX | Tags: Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)
@article{Song2012MultipleLinkage,
title = {Multiple Linkage Forms and Bifurcation Behaviours of the Double-Subtractive-{Goldberg} {6R} Linkage},
author = {Chaoyang Song and Yan Chen},
doi = {10.1016/j.mechmachtheory.2012.07.002},
year = {2012},
date = {2012-11-01},
urldate = {2012-11-01},
journal = {Mechanism and Machine Theory},
volume = {57},
issue = {November},
pages = {95--110},
abstract = {In this paper, a particular type of double-subtractive-Goldberg 6R linkage is obtained by combining two subtractive Goldberg 5R linkages on the commonly shared ‘roof-links’ through the common link-pair method and common Bennett-linkage method. Two distinct linkage forms are obtained with the identical geometry conditions, yet different closure equations. Bifurcation behaviours of these two forms are analysed, leading to the discovery of two more linkage forms of this linkage, which cannot be constructed with Bennett linkages or Goldberg linkages directly. From the construction process, this 6R linkage belongs to the Bennett-based linkages. But about the bifurcation behaviours, it is closely related to the line-symmetric Bricard linkage because of its hidden symmetric property. Therefore, it could play an important role in exploring the relationship between the Bennett-based linkages and the Bricard linkages.},
keywords = {Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)},
pubstate = {published},
tppubtype = {article}
}
Chaoyang Song, Yan Chen
A Spatial 6R Linkage Derived from Subtractive Goldberg 5R Linkages Journal Article
In: Mechanism and Machine Theory, vol. 46, iss. August, no. 8, pp. 1097-1106, 2011.
Abstract | Links | BibTeX | Tags: Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)
@article{Song2011ASpatial,
title = {A Spatial {6R} Linkage Derived from Subtractive {Goldberg} {5R} Linkages},
author = {Chaoyang Song and Yan Chen},
doi = {10.1016/j.mechmachtheory.2011.03.006},
year = {2011},
date = {2011-08-01},
urldate = {2011-08-01},
journal = {Mechanism and Machine Theory},
volume = {46},
number = {8},
issue = {August},
pages = {1097--1106},
abstract = {In this paper, a subtractive Goldberg 5R linkage is defined as a variation of Goldberg 5R linkage. A spatial 6R linkage is constructed by combining two subtractive Goldberg 5R linkages through a common Bennett linkage. This 6R linkage, namely double subtractive Goldberg 6R linkage, appears to be distinct from other existing spatial 6R overconstrained linkages reported before. Both the overconstrained geometric conditions and the closure equations of the proposed linkage are derived. Physical models are also made to validate the linkage.},
keywords = {Authorship - First Author, JCR Q1, Jour - Mech. Mach. Theory (MMT)},
pubstate = {published},
tppubtype = {article}
}
Conference Papers
Sorry, no publications matched your criteria.
Conference Workshops & Extended Abstracts
Sorry, no publications matched your criteria.
Doctoral Thesis
Sorry, no publications matched your criteria.


































