




Working Papers
Sorry, no publications matched your criteria.
Under Review
Sorry, no publications matched your criteria.
Journal Articles
Linhan Yang, Bidan Huang, Qingbiao Li, Ya-Yen Tsai, Wang Wei Lee, Chaoyang Song, Jia Pan
TacGNN: Learning Tactile-based In-hand Manipulation with a Blind Robot using Hierarchical Graph Neural Network Journal Article
In: IEEE Robotics and Automation Letters, vol. 8, iss. June, no. 6, pp. 3605-3612, 2023.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2023TacGNN,
title = {{TacGNN}: Learning Tactile-based In-hand Manipulation with a Blind Robot using Hierarchical Graph Neural Network},
author = {Linhan Yang and Bidan Huang and Qingbiao Li and Ya-Yen Tsai and Wang Wei Lee and Chaoyang Song and Jia Pan},
doi = {10.1109/LRA.2023.3264759},
year = {2023},
date = {2023-04-05},
urldate = {2023-04-05},
journal = {IEEE Robotics and Automation Letters},
volume = {8},
number = {6},
issue = {June},
pages = {3605--3612},
abstract = {In this letter, we propose a novel framework for tactile-based dexterous manipulation learning with a blind anthropomorphic robotic hand, i.e. without visual sensing. First, object-related states were extracted from the raw tactile signals by a graph-based perception model - TacGNN. The resulting tactile features were then utilized in the policy learning of an in-hand manipulation task in the second stage. This method was examined by a Baoding ball task - simultaneously manipulating two spheres around each other by 180 degrees in hand. We conducted experiments on object states prediction and in-hand manipulation using a reinforcement learning algorithm (PPO). Results show that TacGNN is effective in predicting object-related states during manipulation by decreasing the RMSE of prediction to 0.096 cm comparing to other methods, such as MLP, CNN, and GCN. Finally, the robot hand could finish an in-hand manipulation task solely relying on the robotic own perception - tactile sensing and proprioception. In addition, our methods are tested on three tasks with different difficulty levels and transferred to the real robot without further training.},
keywords = {Authorship - Co-Author, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Youcan Yan, Yajing Shen, Chaoyang Song, Jia Pan
Tactile Super-Resolution Model for Soft Magnetic Skin Journal Article
In: IEEE Robotics and Automation Letters, vol. 7, iss. April, no. 2, pp. 2589-2596, 2022.
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yan2022TactileSuper,
title = {Tactile Super-Resolution Model for Soft Magnetic Skin},
author = {Youcan Yan and Yajing Shen and Chaoyang Song and Jia Pan},
doi = {10.1109/LRA.2022.3141449},
year = {2022},
date = {2022-01-10},
urldate = {2022-01-10},
journal = {IEEE Robotics and Automation Letters},
volume = {7},
number = {2},
issue = {April},
pages = {2589--2596},
abstract = {Tactile sensors of high spatial resolution can provide rich contact information in terms of accurate contact location and force magnitude for robots. However, achieving a high spatial resolution normally requires a high density of tactile sensing cells (or taxels), which will inevitably lead to crowded wire connections, more data acquisition time and probably crosstalk between taxels. An alternative approach to improve the spatial resolution without introducing a high density of taxels is employing super-resolution technology. Here, we propose a novel tactile super-resolution method based on a sinusoidally magnetized soft magnetic skin, by which we have achieved a 15-fold improvement of localization accuracy (from 6 mm to 0.4 mm) as well as the ability to measure the force magnitude. Different from the existing super-resolution methods that rely on overlapping signals of neighbouring taxels, our model only relies on the local information from a single 3-axis taxel and thereby can detect multipoint contact applied on neighboring taxels and work properly even when some of the neighbouring taxels near the contact position are damaged (or unavailable). With this property, our method would be robust to damage and could potentially benefit robotic applications that require multipoint contact detection.},
keywords = {Authorship - Co-Author, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Baiyue Wang, Weijie Guo, Shihao Feng, Hongdong Yi, Fang Wan, Chaoyang Song
Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing Journal Article
In: IEEE Robotics and Automation Letters, vol. 6, iss. July, no. 3, pp. 5284-5291, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Wang2021VolumetricallyEnhanced,
title = {Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing},
author = {Baiyue Wang and Weijie Guo and Shihao Feng and Hongdong Yi and Fang Wan and Chaoyang Song},
doi = {10.1109/LRA.2021.3072859},
year = {2021},
date = {2021-07-01},
urldate = {2021-07-01},
journal = {IEEE Robotics and Automation Letters},
volume = {6},
number = {3},
issue = {July},
pages = {5284--5291},
abstract = {Soft robots often show a superior power-to-weight ratio using highly compliant, light-weight material, which leverages various bio-inspired body designs to generate desirable deformations for life-like motions. In this letter, given that most material used for soft robots is light-weight in general, we propose a volumetrically enhanced design strategy for soft robots, providing a novel design guideline to govern the form factor of soft robots. We present the design, modeling, and optimization of a volumetrically enhanced soft actuator (VESA) with linear and rotary motions, respectively, achieving superior force and torque output, linear and rotary displacement, and overall extension ratio per unit volume. We further explored VESA's proprioceptive sensing capability by validating the output force and torque through analytical modeling and experimental verification. Our results show that the volumetric metrics hold the potential to be used as a practical design guideline to optimize soft robots’ engineering performance.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Linhan Yang, Xudong Han, Weijie Guo, Fang Wan, Jia Pan, Chaoyang Song
Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping Journal Article
In: IEEE Robotics and Automation Letters, vol. 6, iss. April, no. 2, pp. 3817 - 3824, 2021.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2021LearningBased,
title = {Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping},
author = {Linhan Yang and Xudong Han and Weijie Guo and Fang Wan and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2021.3065186},
year = {2021},
date = {2021-04-01},
urldate = {2021-04-01},
journal = {IEEE Robotics and Automation Letters},
volume = {6},
number = {2},
issue = {April},
pages = {3817--3824},
abstract = {This letter presents a novel design of a soft tactile finger with omni-directional adaptation using multi-channel optical fibers for rigid-soft interactive grasping. Machine learning methods are used to train a model for real-time prediction of force, torque, and contact using the tactile data collected. We further integrated such fingers in a reconfigurable gripper design with three fingers so that the finger arrangement can be actively adjusted in real-time based on the tactile data collected during grasping, achieving the process of rigid-soft interactive grasping. Detailed sensor calibration and experimental results are also included to further validate the proposed design for enhanced grasping robustness. Video: https://www.youtube.com/watch?v=ynCfSA4FQnY.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Fang Wan, Haokun Wang, Jiyuan Wu, Yujia Liu, Sheng Ge, Chaoyang Song
A Reconfigurable Design for Omni-adaptive Grasp Learning Journal Article
In: IEEE Robotics and Automation Letters, vol. 5, iss. July, no. 3, pp. 4210-4217, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Wan2020AReconfigurable,
title = {A Reconfigurable Design for Omni-adaptive Grasp Learning},
author = {Fang Wan and Haokun Wang and Jiyuan Wu and Yujia Liu and Sheng Ge and Chaoyang Song},
doi = {10.1109/LRA.2020.2982059},
year = {2020},
date = {2020-07-01},
urldate = {2020-07-01},
journal = {IEEE Robotics and Automation Letters},
volume = {5},
number = {3},
issue = {July},
pages = {4210--4217},
abstract = {The engineering design of robotic grippers presents an ample design space for optimization towards robust grasping. In this letter, we investigate how learning method can be used to support the design reconfiguration of robotic grippers for grasping using a novel soft structure with omni-directional adaptation. We propose a gripper system that is reconfigurable in terms of the number and arrangement of the proposed finger, which generates a large number of possible design configurations. Such design reconfigurations with omni-adaptive fingers enables us to systematically investigate the optimal arrangement of the fingers towards robust grasping. Furthermore, we adopt a learning-based method as the baseline to benchmark the effectiveness of each design configuration. As a result, we found that the 3-finger radial configuration is suitable for space-saving and cost-effectiveness, achieving an average 96% grasp success rate on seen and novel objects selected from the YCB dataset. The 4-finger radial arrangement can be applied to cases that require a higher payload with even distribution. We achieved dimension reduction using the radial gripper design with the removal of z-axis rotation during grasping. We also reported the different outcomes with or without friction enhancement of the soft finger network.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Linhan Yang, Fang Wan, Haokun Wang, Xiaobo Liu, Yujia Liu, Jia Pan, Chaoyang Song
Rigid-Soft Interactive Learning for Robust Grasping Journal Article
In: IEEE Robotics and Automation Letters, vol. 5, iss. April, no. 2, pp. 1720 - 1727, 2020.
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track
@article{Yang2020RigidSoft,
title = {Rigid-Soft Interactive Learning for Robust Grasping},
author = {Linhan Yang and Fang Wan and Haokun Wang and Xiaobo Liu and Yujia Liu and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2020.2969932},
year = {2020},
date = {2020-04-01},
urldate = {2020-04-01},
journal = {IEEE Robotics and Automation Letters},
volume = {5},
number = {2},
issue = {April},
pages = {1720--1727},
abstract = {Robot learning is widely accepted by academia and industry with its potentials to transform autonomous robot control through machine learning. Inspired by widely used soft fingers on grasping, we propose a method of rigid-soft interactive learning, aiming at reducing the time of data collection. In this letter, we classify the interaction categories into Rigid-Rigid, Rigid-Soft, SoftRigid according to the interaction surface between grippers and target objects. We find experimental evidence that the interaction types between grippers and target objects play an essential role in the learning methods. We use soft, stuffed toys for training, instead of everyday objects, to reduce the integration complexity and computational burden. Although the stuffed toys are limited in reflecting the physics of finger-object interaction in real-life scenarios, we exploit such rigid-soft interaction by changing the gripper fingers to the soft ones when dealing with rigid, daily-life items such as the Yale-CMU-Berkeley (YCB) objects. With a small data collection of 5 K picking attempts in total, our results suggest that such Rigid-Soft and Soft-Rigid interactions are transferable. Moreover, the combination of such interactions shows better performance on the grasping test. We also explore the effect of the grasp type on the learning method by changing the gripper configurations. We achieve the best grasping performance at 97.5% for easy YCB objects and 81.3% for difficult YCB objects while using a precise grasp with a two-soft-finger gripper to collect training data and power grasp with a four-soft-finger gripper to test the grasp policy.},
keywords = {Authorship - Corresponding, JCR Q2, Jour - IEEE Robot. Autom. Lett. (RA-L), Special - Dual-Track},
pubstate = {published},
tppubtype = {article}
}
Conference Papers
Xudong Han, Ning Guo, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Chaoyang Song, Fang Wan
Proprioceptive State Estimation for Amphibious Tactile Sensing Conference
IEEE International Conference on Robotics and Automation (ICRA2025), Atlanta, USA, 2025, (Dual-track Submission with TRO: https://doi.org/10.1109/TRO.2024.3463509).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA, Special - Dual-Track
@conference{Han2025ProprioceptiveState,
  title     = {Proprioceptive State Estimation for Amphibious Tactile Sensing},
  author    = {Xudong Han and Ning Guo and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Chaoyang Song and Fang Wan},
  url       = {https://github.com/ancorasir/PropSE},
  doi       = {10.1109/TRO.2024.3463509},
  year      = {2025},
  date      = {2025-03-07},
  urldate   = {2025-03-07},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA2025)},
  address   = {Atlanta, USA},
  abstract  = {This paper presents a novel vision-based proprioception approach for a soft robotic finger that can estimate and reconstruct tactile interactions in terrestrial and aquatic environments. The key to this system lies in the finger's unique metamaterial structure, which facilitates omni-directional passive adaptation during grasping, protecting delicate objects across diverse scenarios. A compact in-finger camera captures high-framerate images of the finger's deformation during contact, extracting crucial tactile data in real time. We present a volumetric discretized model of the soft finger and use the geometry constraints captured by the camera to find the optimal estimation of the deformed shape. The approach is benchmarked using a motion capture system with sparse markers and a haptic device with dense measurements. Both results show state-of-the-art accuracies, with a median error of 1.96 mm for overall body deformation, corresponding to 2.1% of the finger's length. More importantly, the state estimation is robust in both on-land and underwater environments, as we demonstrate its usage for underwater object shape sensing. This combination of passive adaptation and real-time tactile sensing paves the way for amphibious robotic grasping applications. All codes are shared on GitHub: https://github.com/ancorasir/PropSE.},
  note      = {Dual-track Submission with TRO: https://doi.org/10.1109/TRO.2024.3463509},
  keywords  = {Authorship - Corresponding, Conf - ICRA, Special - Dual-Track},
  pubstate  = {published},
  tppubtype = {conference}
}
Linhan Yang, Bidan Huang, Qingbiao Li, Ya-Yen Tsai, Wang Wei Lee, Chaoyang Song, Jia Pan
TacGNN: Learning Tactile-Based In-Hand Manipulation with a Blind Robot Using Hierarchical Graph Neural Network Conference
IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS2023), Huntington Place, Detroit, Michigan, USA, 2023, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2023.3264759).
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, Conf - IROS, Special - Dual-Track
@conference{Yang2023TacGNN-IROS,
title = {{TacGNN}: Learning Tactile-Based In-Hand Manipulation with a Blind Robot Using Hierarchical Graph Neural Network},
author = {Linhan Yang and Bidan Huang and Qingbiao Li and Ya-Yen Tsai and Wang Wei Lee and Chaoyang Song and Jia Pan},
url = {https://ieee-iros.org/},
doi = {10.1109/LRA.2023.3264759},
year = {2023},
date = {2023-10-01},
urldate = {2023-10-01},
booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS2023)},
address = {Huntington Place, Detroit, Michigan, USA},
abstract = {In this letter, we propose a novel framework for tactile-based dexterous manipulation learning with a blind anthropomorphic robotic hand, i.e. without visual sensing. First, object-related states were extracted from the raw tactile signals by a graph-based perception model - TacGNN. The resulting tactile features were then utilized in the policy learning of an in-hand manipulation task in the second stage. This method was examined by a Baoding ball task - simultaneously manipulating two spheres around each other by 180 degrees in hand. We conducted experiments on object states prediction and in-hand manipulation using a reinforcement learning algorithm (PPO). Results show that TacGNN is effective in predicting object-related states during manipulation by decreasing the RMSE of prediction to 0.096 cm comparing to other methods, such as MLP, CNN, and GCN. Finally, the robot hand could finish an in-hand manipulation task solely relying on the robotic own perception - tactile sensing and proprioception. In addition, our methods are tested on three tasks with different difficulty levels and transferred to the real robot without further training.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2023.3264759},
keywords = {Authorship - Co-Author, Conf - IROS, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Youcan Yan, Yajing Shen, Chaoyang Song, Jia Pan
Tactile Super-Resolution Model for Soft Magnetic Skin Conference
IEEE International Conference on Robotics and Automation (ICRA2022), Philadelphia (PA), USA, 2022, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2022.3141449).
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, Conf - ICRA, Special - Dual-Track
@conference{Yan2022TactileSuper-ICRA,
title = {Tactile Super-Resolution Model for Soft Magnetic Skin},
author = {Youcan Yan and Yajing Shen and Chaoyang Song and Jia Pan},
doi = {10.1109/LRA.2022.3141449},
year = {2022},
date = {2022-01-10},
urldate = {2022-01-10},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2022)},
address = {Philadelphia (PA), USA},
abstract = {Tactile sensors of high spatial resolution can provide rich contact information in terms of accurate contact location and force magnitude for robots. However, achieving a high spatial resolution normally requires a high density of tactile sensing cells (or taxels), which will inevitably lead to crowded wire connections, more data acquisition time and probably crosstalk between taxels. An alternative approach to improve the spatial resolution without introducing a high density of taxels is employing super-resolution technology. Here, we propose a novel tactile super-resolution method based on a sinusoidally magnetized soft magnetic skin, by which we have achieved a 15-fold improvement of localization accuracy (from 6 mm to 0.4 mm) as well as the ability to measure the force magnitude. Different from the existing super-resolution methods that rely on overlapping signals of neighbouring taxels, our model only relies on the local information from a single 3-axis taxel and thereby can detect multipoint contact applied on neighboring taxels and work properly even when some of the neighbouring taxels near the contact position are damaged (or unavailable). With this property, our method would be robust to damage and could potentially benefit robotic applications that require multipoint contact detection.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2022.3141449},
keywords = {Authorship - Co-Author, Conf - ICRA, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Linhan Yang, Xudong Han, Weijie Guo, Fang Wan, Jia Pan, Chaoyang Song
Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping Conference
IEEE International Conference on Robotics and Automation (ICRA2021), Xi’an, China, 2021, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3065186).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA, Special - Dual-Track
@conference{Yang2021LearningBased-ICRA,
title = {Learning-based Optoelectronically Innervated Tactile Finger for Rigid-Soft Interactive Grasping},
author = {Linhan Yang and Xudong Han and Weijie Guo and Fang Wan and Jia Pan and Chaoyang Song},
doi = {10.1109/LRA.2021.3065186},
year = {2021},
date = {2021-05-30},
urldate = {2021-05-30},
booktitle = {IEEE International Conference on Robotics and Automation (ICRA2021)},
address = {Xi’an, China},
abstract = {This letter presents a novel design of a soft tactile finger with omni-directional adaptation using multi-channel optical fibers for rigid-soft interactive grasping. Machine learning methods are used to train a model for real-time prediction of force, torque, and contact using the tactile data collected. We further integrated such fingers in a reconfigurable gripper design with three fingers so that the finger arrangement can be actively adjusted in real-time based on the tactile data collected during grasping, achieving the process of rigid-soft interactive grasping. Detailed sensor calibration and experimental results are also included to further validate the proposed design for enhanced grasping robustness.},
note = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3065186},
keywords = {Authorship - Corresponding, Conf - ICRA, Special - Dual-Track},
pubstate = {published},
tppubtype = {conference}
}
Weijie Guo, Baiyue Wang, Shihao Feng, Hongdong Yi, Fang Wan, Chaoyang Song
Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing Conference
IEEE International Conference on Soft Robotics (RoboSoft2021), New Haven, CT, USA, 2021, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3072859).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track
@conference{Guo2021VolumetricallyEnhanced,
  title     = {Volumetrically Enhanced Soft Actuator with Proprioceptive Sensing},
  author    = {Weijie Guo and Baiyue Wang and Shihao Feng and Hongdong Yi and Fang Wan and Chaoyang Song},
  doi       = {10.1109/LRA.2021.3072859},
  year      = {2021},
  date      = {2021-04-12},
  urldate   = {2021-04-12},
  booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2021)},
  address   = {New Haven, CT, USA},
  abstract  = {Soft robots often show a superior power-to-weight ratio using highly compliant, light-weight material, which leverages various bio-inspired body designs to generate desirable deformations for life-like motions. In this letter, given that most material used for soft robots is light-weight in general, we propose a volumetrically enhanced design strategy for soft robots, providing a novel design guideline to govern the form factor of soft robots. We present the design, modeling, and optimization of a volumetrically enhanced soft actuator (VESA) with linear and rotary motions, respectively, achieving superior force and torque output, linear and rotary displacement, and overall extension ratio per unit volume. We further explored VESA's proprioceptive sensing capability by validating the output force and torque through analytical modeling and experimental verification. Our results show that the volumetric metrics hold the potential to be used as a practical design guideline to optimize soft robots’ engineering performance.},
  note      = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2021.3072859},
  keywords  = {Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track},
  pubstate  = {published},
  tppubtype = {conference}
}
Linhan Yang, Fang Wan, Haokun Wang, Xiaobo Liu, Yujia Liu, Jia Pan, Chaoyang Song
Rigid-Soft Interactive Learning for Robust Grasping Conference
IEEE International Conference on Robotics and Automation (ICRA2020), Paris, France, 2020, (Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2969932).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - ICRA, Special - Dual-Track
@conference{Yang2020RigidSoft-ICRA,
  title     = {Rigid-Soft Interactive Learning for Robust Grasping},
  author    = {Linhan Yang and Fang Wan and Haokun Wang and Xiaobo Liu and Yujia Liu and Jia Pan and Chaoyang Song},
  doi       = {10.1109/LRA.2020.2969932},
  year      = {2020},
  date      = {2020-05-31},
  urldate   = {2020-05-31},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA2020)},
  address   = {Paris, France},
  abstract  = {Robot learning is widely accepted by academia and industry with its potentials to transform autonomous robot control through machine learning. Inspired by widely used soft fingers on grasping, we propose a method of rigid-soft interactive learning, aiming at reducing the time of data collection. In this letter, we classify the interaction categories into Rigid-Rigid, Rigid-Soft, SoftRigid according to the interaction surface between grippers and target objects. We find experimental evidence that the interaction types between grippers and target objects play an essential role in the learning methods. We use soft, stuffed toys for training, instead of everyday objects, to reduce the integration complexity and computational burden. Although the stuffed toys are limited in reflecting the physics of finger-object interaction in real-life scenarios, we exploit such rigid-soft interaction by changing the gripper fingers to the soft ones when dealing with rigid, daily-life items such as the Yale-CMU-Berkeley (YCB) objects. With a small data collection of 5 K picking attempts in total, our results suggest that such Rigid-Soft and Soft-Rigid interactions are transferable. Moreover, the combination of such interactions shows better performance on the grasping test. We also explore the effect of the grasp type on the learning method by changing the gripper configurations. We achieve the best grasping performance at 97.5% for easy YCB objects and 81.3% for difficult YCB objects while using a precise grasp with a two-soft-finger gripper to collect training data and power grasp with a four-soft-finger gripper to test the grasp policy.},
  note      = {Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2969932},
  keywords  = {Authorship - Corresponding, Conf - ICRA, Special - Dual-Track},
  pubstate  = {published},
  tppubtype = {conference}
}
Fang Wan, Haokun Wang, Jiyuan Wu, Yujia Liu, Sheng Ge, Chaoyang Song
A Reconfigurable Design for Omni-Adaptive Grasp Learning Conference
IEEE International Conference on Soft Robotics (RoboSoft2020), New Haven, CT, USA, 2020, (Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2982059).
Abstract | Links | BibTeX | Tags: Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track
@conference{Wan2020ReconfigurableDesign,
  title     = {A Reconfigurable Design for Omni-Adaptive Grasp Learning},
  author    = {Fang Wan and Haokun Wang and Jiyuan Wu and Yujia Liu and Sheng Ge and Chaoyang Song},
  doi       = {10.1109/LRA.2020.2982059},
  year      = {2020},
  date      = {2020-05-15},
  urldate   = {2020-05-15},
  booktitle = {IEEE International Conference on Soft Robotics (RoboSoft2020)},
  address   = {New Haven, CT, USA},
  abstract  = {The engineering design of robotic grippers presents an ample design space for optimization towards robust grasping. In this letter, we investigate how learning method can be used to support the design reconfiguration of robotic grippers for grasping using a novel soft structure with omni-directional adaptation. We propose a gripper system that is reconfigurable in terms of the number and arrangement of the proposed finger, which generates a large number of possible design configurations. Such design reconfigurations with omni-adaptive fingers enables us to systematically investigate the optimal arrangement of the fingers towards robust grasping. Furthermore, we adopt a learning-based method as the baseline to benchmark the effectiveness of each design configuration. As a result, we found that the 3-finger radial configuration is suitable for space-saving and cost-effectiveness, achieving an average 96% grasp success rate on seen and novel objects selected from the YCB dataset. The 4-finger radial arrangement can be applied to cases that require a higher payload with even distribution. We achieved dimension reduction using the radial gripper design with the removal of z-axis rotation during grasping. We also reported the different outcomes with or without friction enhancement of the soft finger network.},
  note      = {Dual-track Submission with RAL: https://doi.org/10.1109/lra.2020.2982059},
  keywords  = {Authorship - Corresponding, Conf - RoboSoft, Special - Dual-Track},
  pubstate  = {published},
  tppubtype = {conference}
}
Extended Abstracts
Sorry, no publications matched your criteria.
Doctoral Thesis
Sorry, no publications matched your criteria.