




Working Papers
Sorry, no publications matched your criteria.
Under Review
Sorry, no publications matched your criteria.
Journal Articles
Sorry, no publications matched your criteria.
Conference Papers
Linhan Yang, Bidan Huang, Qingbiao Li, Ya-Yen Tsai, Wang Wei Lee, Chaoyang Song, Jia Pan
IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS2023), Huntington Place, Detroit, Michigan, USA, 2023, (Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2023.3264759).
Abstract | Links | BibTeX | Tags: Authorship - Co-Author, Conf - IROS, Special - Dual-Track
@conference{Yang2023TacGNN-IROS,
  title     = {{TacGNN}: Learning Tactile-Based In-Hand Manipulation with a Blind Robot Using Hierarchical Graph Neural Network},
  author    = {Linhan Yang and Bidan Huang and Qingbiao Li and Ya-Yen Tsai and Wang Wei Lee and Chaoyang Song and Jia Pan},
  url       = {https://ieee-iros.org/},
  year      = {2023},
  date      = {2023-10-01},
  urldate   = {2023-10-01},
  booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS2023)},
  address   = {Huntington Place, Detroit, Michigan, USA},
  abstract  = {In this letter, we propose a novel framework for tactile-based dexterous manipulation learning with a blind anthropomorphic robotic hand, i.e. without visual sensing. First, object-related states were extracted from the raw tactile signals by a graph-based perception model - TacGNN. The resulting tactile features were then utilized in the policy learning of an in-hand manipulation task in the second stage. This method was examined by a Baoding ball task - simultaneously manipulating two spheres around each other by 180 degrees in hand. We conducted experiments on object states prediction and in-hand manipulation using a reinforcement learning algorithm (PPO). Results show that TacGNN is effective in predicting object-related states during manipulation by decreasing the RMSE of prediction to 0.096 cm comparing to other methods, such as MLP, CNN, and GCN. Finally, the robot hand could finish an in-hand manipulation task solely relying on the robotic own perception - tactile sensing and proprioception. In addition, our methods are tested on three tasks with different difficulty levels and transferred to the real robot without further training.},
  note      = {Dual-track Submission with RAL: https://doi.org/10.1109/LRA.2023.3264759},
  keywords  = {Authorship - Co-Author, Conf - IROS, Special - Dual-Track},
  pubstate  = {published},
  tppubtype = {conference}
}
Extended Abstracts
Haoran Sun, Linhan Yang, Zeqing Zhang, Ning Guo, Lei Yang, Fang Wan, Chaoyang Song, Jia Pan
CopGNN: Learning End-to-End Cloth Coverage Prediction via Graph Neural Networks Workshop
2024, (Extended Abstract accepted to IROS 2024 Workshop on Benchmarking via Competitions in Robotic Grasping and Manipulation).
@workshop{Sun2024CopGNN,
  title     = {{CopGNN}: Learning End-to-End Cloth Coverage Prediction via Graph Neural Networks},
  author    = {Haoran Sun and Linhan Yang and Zeqing Zhang and Ning Guo and Lei Yang and Fang Wan and Chaoyang Song and Jia Pan},
  url       = {https://sites.google.com/view/iros2024-workshop-bench-in-rgm/},
  year      = {2024},
  date      = {2024-10-13},
  urldate   = {2024-10-13},
  abstract  = {Cloth manipulation in robotics, such as folding or unfolding fabrics, remains challenging due to deformable materials' complex and nonlinear dynamics, which can adopt infinite configurations. As Team Greater Bay, we participated in the ICRA 2024 Cloth Competition and scored an Average Coverage of 0.53 (the 1st place team scored 0.60). This extended abstract presents our Coverage Prediction Graph Neural Network (CopGNN) approach implemented for this competition. Instead of directly estimating the cloth's configuration, our method implicitly infers the unknown state using a Graph Neural Network (GNN). It predicts the resultant coverage area from multiple grasping points using a second GNN without relying on an explicit dynamics model. Contributions of this work include: (1) Developed a comprehensive simulation pipeline to generate a large-scale dataset tailored to the cloth manipulation task. (2) Proposed an end-to-end approach to predict the coverage area using only the hanging cloth's depth image. (3) Introduced a heuristic-based sampling strategy to enhance the robustness of zero-shot sim-to-real transfer.},
  note      = {Extended Abstract accepted to IROS 2024 Workshop on Benchmarking via Competitions in Robotic Grasping and Manipulation},
  pubstate  = {published},
  tppubtype = {workshop}
}
Tianyu Wu, Sheng Ge, Yujian Dong, Ronghan Xu, Fang Wan, Chaoyang Song
From DeepClaw to MagiClaw: Towards Universal Action Embodiment Workshop
2024, (Extended Abstract accepted to IROS 2024 Workshop on Environment Dynamics Matters: Embodied Navigation to Movable Objects).
@workshop{Wu2024MagiClaw,
  title     = {From {DeepClaw} to {MagiClaw}: Towards Universal Action Embodiment},
  author    = {Tianyu Wu and Sheng Ge and Yujian Dong and Ronghan Xu and Fang Wan and Chaoyang Song},
  url       = {https://edmws.github.io/},
  year      = {2024},
  date      = {2024-10-13},
  urldate   = {2024-10-13},
  note      = {Extended Abstract accepted to IROS 2024 Workshop on Environment Dynamics Matters: Embodied Navigation to Movable Objects},
  pubstate  = {published},
  tppubtype = {workshop}
}
Jianwen Luo, Sicong Liu, Chengyu Lin, Yong Zhou, Zixuan Fan, Zheng Wang, Chaoyang Song, Harry Asada, Chenglong Fu
Mapping Human Muscle Force to Supernumerary Robotics Device for Overhead Task Assistance Workshop
2020, (Extended Abstract accepted to the IEEE/ASME AIM 2020 Workshop on Supernumerary Robotic Devices).
@workshop{Luo2020MappingHuman,
  title     = {Mapping Human Muscle Force to Supernumerary Robotics Device for Overhead Task Assistance},
  author    = {Jianwen Luo and Sicong Liu and Chengyu Lin and Yong Zhou and Zixuan Fan and Zheng Wang and Chaoyang Song and Harry Asada and Chenglong Fu},
  url       = {https://aim2020srd.wixsite.com/aim2020srd},
  doi       = {10.48550/arXiv.2107.13799},
  year      = {2020},
  date      = {2020-11-01},
  urldate   = {2020-11-01},
  note      = {Extended Abstract accepted to the IEEE/ASME AIM 2020 Workshop on Supernumerary Robotic Devices},
  pubstate  = {published},
  tppubtype = {workshop}
}
Doctoral Thesis
Sorry, no publications matched your criteria.