




Journal Articles
Ning Guo, Xudong Han, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Fang Wan, Chaoyang Song
Reconstructing Soft Robotic Touch via In-Finger Vision
In: Advanced Intelligent Systems, vol. 6, no. 10, art. no. 2400022, 2024.
@article{Guo2024ReconstructingSoft,
title = {Reconstructing Soft Robotic Touch via In-Finger Vision},
author = {Ning Guo and Xudong Han and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202400022},
year = {2024},
date = {2024-10-01},
urldate = {2024-10-01},
journal = {Advanced Intelligent Systems},
volume = {6},
number = {10},
issue = {October},
pages = {2400022},
abstract = {Incorporating authentic tactile interactions into virtual environments presents a notable challenge for the emerging development of soft robotic metamaterials. In this study, a vision-based approach is introduced to learning proprioceptive interactions by simultaneously reconstructing the shape and touch of a soft robotic metamaterial (SRM) during physical engagements. The SRM design is optimized to the size of a finger with enhanced adaptability in 3D interactions while incorporating a see-through viewing field inside, which can be visually captured by a miniature camera underneath to provide a rich set of image features for touch digitization. Employing constrained geometric optimization, the proprioceptive process with aggregated multi-handles is modeled. This approach facilitates real-time, precise, and realistic estimations of the finger's mesh deformation within a virtual environment. Herein, a data-driven learning model is also proposed to estimate touch positions, achieving reliable results with impressive R² scores of 0.9681, 0.9415, and 0.9541 along the x, y, and z axes. Furthermore, the robust performance of the proposed methods in touch-based human–cybernetic interfaces and human–robot collaborative grasping is demonstrated. In this study, the door is opened to future applications in touch-based digital twin interactions through vision-based soft proprioception.},
keywords = {Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
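As a companion to the per-axis accuracy figures quoted in the abstract above, the following is a minimal, hypothetical Python sketch of how R² scores along the x, y, and z axes of a touch-position regressor can be computed with scikit-learn. The random feature and label arrays, the MLPRegressor model, and all sizes are placeholder assumptions; the paper's in-finger-vision features and learning model are not reproduced, so the printed scores here are meaningless and only the evaluation mechanics carry over.

# Hypothetical sketch: per-axis R^2 evaluation for a 3-axis touch-position regressor.
# X and Y are placeholders standing in for image-derived touch features and (x, y, z)
# touch positions; they are NOT the paper's data.
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPRegressor
from sklearn.metrics import r2_score

rng = np.random.default_rng(0)
X = rng.normal(size=(2000, 64))    # stand-in for visual touch features
Y = rng.normal(size=(2000, 3))     # stand-in for touch positions along x, y, z

X_tr, X_te, Y_tr, Y_te = train_test_split(X, Y, test_size=0.2, random_state=0)
model = MLPRegressor(hidden_layer_sizes=(128, 64), max_iter=500, random_state=0)
model.fit(X_tr, Y_tr)

# multioutput="raw_values" returns one R^2 per output axis, which is how
# per-axis scores such as 0.9681, 0.9415, and 0.9541 would be reported.
r2_xyz = r2_score(Y_te, model.predict(X_te), multioutput="raw_values")
print("R^2 along x, y, z:", r2_xyz)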
Ning Guo, Xudong Han, Xiaobo Liu, Shuqiao Zhong, Zhiyuan Zhou, Jian Lin, Jiansheng Dai, Fang Wan, Chaoyang Song
Autoencoding a Soft Touch to Learn Grasping from On-land to Underwater
In: Advanced Intelligent Systems, vol. 6, no. 1, art. no. 2300382, 2024.
@article{Guo2024AutoencodingA,
title = {Autoencoding a Soft Touch to Learn Grasping from On-land to Underwater},
author = {Ning Guo and Xudong Han and Xiaobo Liu and Shuqiao Zhong and Zhiyuan Zhou and Jian Lin and Jiansheng Dai and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202300382},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {Advanced Intelligent Systems},
volume = {6},
number = {1},
issue = {January},
pages = {2300382},
abstract = {Robots play a critical role as the physical agent of human operators in exploring the ocean. However, it remains challenging to grasp objects reliably while fully submerging under a highly pressurized aquatic environment with little visible light, mainly due to the fluidic interference on the tactile mechanics between the finger and object surfaces. This study investigates the transferability of grasping knowledge from on-land to underwater via a vision-based soft robotic finger that learns 6D forces and torques (FT) using a supervised variational autoencoder (SVAE). A high-framerate camera captures the whole-body deformations while a soft robotic finger interacts with physical objects on-land and underwater. Results show that the trained SVAE model learns a series of latent representations of the soft mechanics transferable from land to water, presenting a superior adaptation to the changing environments against commercial FT sensors. Soft, delicate, and reactive grasping enabled by tactile intelligence enhances the gripper's underwater interaction with improved reliability and robustness at a much-reduced cost, paving the path for learning-based intelligent grasping to support fundamental scientific discoveries in environmental and ocean research.},
keywords = {Authorship - Corresponding, Award - Front Cover, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
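To make the supervised variational autoencoder (SVAE) idea in the abstract above more concrete, here is a hypothetical PyTorch sketch in which a finger-deformation image is encoded into a latent code, decoded back for reconstruction, and simultaneously mapped to a 6D force/torque (FT) prediction. The 64×64 image size, layer widths, latent dimension, and loss weights are illustrative assumptions, not the architecture or hyperparameters used in the paper.

# Hypothetical SVAE sketch: reconstruct the deformation image and regress 6D FT
# from a shared latent code. Sizes and weights are illustrative assumptions.
import torch
import torch.nn as nn
import torch.nn.functional as F

class SVAE(nn.Module):
    def __init__(self, latent_dim=16):
        super().__init__()
        self.enc = nn.Sequential(nn.Flatten(), nn.Linear(64 * 64, 256), nn.ReLU())
        self.mu = nn.Linear(256, latent_dim)
        self.logvar = nn.Linear(256, latent_dim)
        self.dec = nn.Sequential(nn.Linear(latent_dim, 256), nn.ReLU(),
                                 nn.Linear(256, 64 * 64))
        self.ft_head = nn.Linear(latent_dim, 6)   # 6D force/torque regression

    def forward(self, x):
        h = self.enc(x)
        mu, logvar = self.mu(h), self.logvar(h)
        z = mu + torch.randn_like(mu) * torch.exp(0.5 * logvar)  # reparameterization
        return self.dec(z), self.ft_head(z), mu, logvar

def svae_loss(x, x_rec, ft_true, ft_pred, mu, logvar, beta=1e-3, lam=1.0):
    rec = F.mse_loss(x_rec, x.flatten(1))                          # image reconstruction
    kld = -0.5 * torch.mean(1 + logvar - mu.pow(2) - logvar.exp()) # KL regularizer
    sup = F.mse_loss(ft_pred, ft_true)                             # supervised FT term
    return rec + beta * kld + lam * sup

The supervised FT term is what distinguishes this from a plain VAE: the latent representation is shaped jointly by image reconstruction and force/torque labels, which is the kind of representation the paper transfers from on-land to underwater grasping.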
Jiayu Huo, Jingran Wang, Yuqin Guo, Wanghongjie Qiu, Mingdong Chen, Harry Asada, Fang Wan, Chaoyang Song
Reconfigurable Design and Modeling of an Underwater Superlimb for Diving Assistance
In: Advanced Intelligent Systems, vol. 5, no. 11, art. no. 2300245, 2023.
@article{Huo2023ReconfigurableDesign,
title = {Reconfigurable Design and Modeling of an Underwater Superlimb for Diving Assistance},
author = {Jiayu Huo and Jingran Wang and Yuqin Guo and Wanghongjie Qiu and Mingdong Chen and Harry Asada and Fang Wan and Chaoyang Song},
doi = {10.1002/aisy.202300245},
year = {2023},
date = {2023-08-17},
urldate = {2023-08-17},
journal = {Advanced Intelligent Systems},
volume = {5},
number = {11},
issue = {November},
pages = {2300245},
abstract = {This study presents the design of an underwater superlimb as a wearable robot, providing divers with mobility assistance and freeing their hands for manipulating tools underwater. The wearable design features a thrust vectoring system with two 3D-printed, waterproofed modules. The module with adjustable connections and strapping holes is designed to enable reconfiguration for multiple purposes, including regular use as an underwater superlimb for divers, manually operated as a handheld glider for swimmers, combined with an amphibian, legged robot as a quadruped superlimb, and coupled as a dual-unit autonomous underwater vehicle for underwater navigation. The kinematics and dynamics of the prototype and all of its reconfigured modes are developed. A sliding-mode controller is also introduced to achieve stable simulation in PyBullet. Field tests further support the feasibility of the underwater superlimb when worn on a test diver in a swimming pool. As the first underwater superlimb presented in the literature, this study opens new doors for supernumerary robotic limbs in underwater scenarios with multifunctional reconfiguration.},
keywords = {Authorship - Corresponding, Award - Back Cover, Award - Editor's Choice, JCR Q1, Jour - Adv. Intell. Syst. (AIS)},
pubstate = {published},
tppubtype = {article}
}
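As a rough illustration of the sliding-mode control and PyBullet simulation mentioned in the abstract above, the sketch below regulates the depth of a single rigid body with a scalar sliding-mode thrust law. The box-shaped body, controller gains, boundary-layer width, and buoyancy-like feedforward are all hypothetical; the sketch does not model the reconfigurable superlimb or its thrust-vectoring modules.

# Hypothetical sketch: scalar sliding-mode depth regulation of a rigid body in PyBullet.
# Gains, mass, and the target depth are illustrative assumptions.
import numpy as np
import pybullet as p

LAMBDA, K, PHI = 2.0, 20.0, 0.05   # sliding-surface slope, switching gain, boundary layer

def smc_thrust(z, z_dot, z_ref):
    e, e_dot = z - z_ref, z_dot
    s = e_dot + LAMBDA * e            # sliding surface
    return -K * np.tanh(s / PHI)      # smoothed switching term

cid = p.connect(p.DIRECT)
p.setGravity(0, 0, -9.81)
p.setTimeStep(1.0 / 240.0)
box = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1, 0.1, 0.1])
body = p.createMultiBody(baseMass=5.0, baseCollisionShapeIndex=box, basePosition=[0, 0, 0])

for _ in range(2400):
    (x, y, z), _ = p.getBasePositionAndOrientation(body)
    (vx, vy, vz), _ = p.getBaseVelocity(body)
    fz = 5.0 * 9.81 + smc_thrust(z, vz, z_ref=-1.0)   # weight compensation + SMC thrust
    p.applyExternalForce(body, -1, [0, 0, fz], [x, y, z], p.WORLD_FRAME)
    p.stepSimulation()

p.disconnect(cid)

Replacing the discontinuous sign function with a tanh boundary layer is a common way to keep a sliding-mode command from chattering in simulation.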