Abstract
Recent research shows growing interest in adopting touch interaction for robot learning, yet it remains challenging to acquire high-quality, structured tactile data efficiently and at low cost. In this study, we propose a design for vision-based soft robotic tongs that generates reproducible and shareable tactile interaction data for learning. We further developed a web-based platform for convenient data collection and a portable assembly that can be deployed within minutes. We trained a simple network to infer the 6D force and torque from the relative pose of markers on the fingers and reached reasonably high accuracy (an MAE of 0.548 N at 60 Hz within [0, 20] N) at a cost of only 50 USD per set. The recorded tactile data are downloadable for robot learning. We further demonstrated the system interacting with robotic arms for manipulation learning and remote control. We have open-sourced the system, with further information, on GitHub: https://github.com/bionicdl-sustech/SoftRoboticTongs
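As an unofficial illustration of the learning setup the abstract describes, the sketch below shows one plausible form of "a simple network" that regresses a 6D force/torque (wrench) from the relative pose between the finger markers. The input layout (3D translation plus 4D quaternion), hidden sizes, and L1 training loss are assumptions made for illustration only; the authors' actual implementation is in the GitHub repository linked above.

# Minimal sketch (not the authors' released code): a small MLP mapping a 7D
# relative marker pose to a 6D force/torque estimate. All dimensions and
# hyperparameters here are illustrative assumptions.
import torch
import torch.nn as nn

class PoseToWrench(nn.Module):
    """Maps a relative pose [tx, ty, tz, qx, qy, qz, qw] to [Fx, Fy, Fz, Tx, Ty, Tz]."""
    def __init__(self, in_dim: int = 7, hidden: int = 64, out_dim: int = 6):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(in_dim, hidden),
            nn.ReLU(),
            nn.Linear(hidden, hidden),
            nn.ReLU(),
            nn.Linear(hidden, out_dim),
        )

    def forward(self, pose: torch.Tensor) -> torch.Tensor:
        return self.net(pose)

# One training step with an L1 loss, which directly corresponds to the MAE
# metric reported in the abstract. The batches here are random placeholders
# standing in for marker poses and reference force/torque labels.
model = PoseToWrench()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
loss_fn = nn.L1Loss()

pose_batch = torch.randn(32, 7)    # placeholder relative poses
wrench_batch = torch.randn(32, 6)  # placeholder ground-truth wrenches

optimizer.zero_grad()
loss = loss_fn(model(pose_batch), wrench_batch)
loss.backward()
optimizer.step()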
BibTeX
@conference{Wu2024VisionBasedb,
  title     = {Vision-based, Low-cost, Soft Robotic Tongs for Shareable and Reproducible Tactile Learning},
  author    = {Tianyu Wu and Yujian Dong and Yang Xiao and Jinqi Wei and Fang Wan and Chaoyang Song},
  url       = {http://www.ieee-arm.org/},
  year      = {2024},
  date      = {2024-06-01},
  urldate   = {2024-06-01},
  booktitle = {IEEE International Conference on Advanced Robotics and Mechatronics (ICARM2024)},
  address   = {Tokyo, Japan},
  abstract  = {Recent research shows a growing interest in adopting touch interaction for robot learning, yet it remains challenging to efficiently acquire high-quality, structured tactile data at a low cost. In this study, we propose the design of vision-based soft robotic tongs to generate reproducible and shareable data of tactile interaction for learning. We further developed a web-based platform for convenient data collection and a portable assembly that can be deployed within minutes. We trained a simple network to infer the 6D force and torque using relative pose data from markers on the fingers and reached a reasonably high accuracy (an MAE of 0.548 N at 60 Hz within [0,20] N) but cost only 50 USD per set. The recorded tactile data is downloadable for robot learning. We further demonstrated the system for interacting with robotic arms in manipulation learning and remote control. We have open-sourced the system on GitHub with further information. (https://github.com/bionicdl-sustech/SoftRoboticTongs)},
  note      = {Accepted},
  keywords  = {Corresponding Author, ICARM},
  pubstate  = {forthcoming},
  tppubtype = {conference}
}