13 changed files with 1050 additions and 323 deletions
-
8requirements.txt
-
4teleop/open_television/__init__.py
-
77teleop/open_television/_test_television.py
-
80teleop/open_television/_test_tv_wrapper.py
-
53teleop/open_television/constants.py
-
20teleop/open_television/setup.py
-
476teleop/open_television/television.py
-
470teleop/open_television/tv_wrapper.py
-
5teleop/robot_control/robot_arm_ik.py
-
6teleop/robot_control/robot_hand_inspire.py
-
70teleop/robot_control/robot_hand_unitree.py
-
90teleop/teleop_hand_and_arm.py
-
14teleop/utils/mat_tool.py
@ -0,0 +1,4 @@ |
|||||
|
# open_television/__init__.py |
||||
|
# openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout key.pem -out cert.pem |
||||
|
from .television import TeleVision |
||||
|
from .tv_wrapper import TeleVisionWrapper, TeleData, TeleStateData |
||||
@ -0,0 +1,77 @@ |
|||||
|
import os, sys |
||||
|
this_file = os.path.abspath(__file__) |
||||
|
project_root = os.path.abspath(os.path.join(os.path.dirname(this_file), '..')) |
||||
|
if project_root not in sys.path: |
||||
|
sys.path.insert(0, project_root) |
||||
|
|
||||
|
import time |
||||
|
import numpy as np |
||||
|
from multiprocessing import shared_memory |
||||
|
from open_television import TeleVision |
||||
|
|
||||
|
def run_test_television():
    """Interactive smoke test for the TeleVision class.

    Allocates a shared-memory image buffer, starts a TeleVision instance on
    top of it, then continuously prints head/arm poses plus either
    hand-tracking or controller state until interrupted with Ctrl-C.
    """
    # image: binocular buffer, two 640x480 RGB views packed side by side.
    image_shape = (480, 640 * 2, 3)
    # int(...) because SharedMemory expects a plain int size.
    image_shm = shared_memory.SharedMemory(
        create=True, size=int(np.prod(image_shape)) * np.uint8().itemsize)

    # television
    use_hand_track = False
    tv = TeleVision(binocular=True, use_hand_tracking=use_hand_track,
                    img_shape=image_shape, img_shm_name=image_shm.name, webrtc=True)

    try:
        input("Press Enter to start television test...")
        while True:
            print("=" * 80)
            print("Common Data (always available):")
            print(f"head_pose shape: {tv.head_pose.shape}\n{tv.head_pose}\n")
            print(f"left_arm_pose shape: {tv.left_arm_pose.shape}\n{tv.left_arm_pose}\n")
            print(f"right_arm_pose shape: {tv.right_arm_pose.shape}\n{tv.right_arm_pose}\n")
            print("=" * 80)

            if use_hand_track:
                print("Hand Tracking Data:")
                print(f"left_hand_positions shape: {tv.left_hand_positions.shape}\n{tv.left_hand_positions}\n")
                print(f"right_hand_positions shape: {tv.right_hand_positions.shape}\n{tv.right_hand_positions}\n")
                print(f"left_hand_orientations shape: {tv.left_hand_orientations.shape}\n{tv.left_hand_orientations}\n")
                print(f"right_hand_orientations shape: {tv.right_hand_orientations.shape}\n{tv.right_hand_orientations}\n")
                print(f"left_hand_pinch_state: {tv.left_hand_pinch_state}")
                print(f"left_hand_pinch_value: {tv.left_hand_pinch_value}")
                print(f"left_hand_squeeze_state: {tv.left_hand_squeeze_state}")
                print(f"left_hand_squeeze_value: {tv.left_hand_squeeze_value}")
                print(f"right_hand_pinch_state: {tv.right_hand_pinch_state}")
                print(f"right_hand_pinch_value: {tv.right_hand_pinch_value}")
                print(f"right_hand_squeeze_state: {tv.right_hand_squeeze_state}")
                print(f"right_hand_squeeze_value: {tv.right_hand_squeeze_value}")
            else:
                print("Controller Data:")
                print(f"left_controller_trigger_state: {tv.left_controller_trigger_state}")
                print(f"left_controller_trigger_value: {tv.left_controller_trigger_value}")
                print(f"left_controller_squeeze_state: {tv.left_controller_squeeze_state}")
                print(f"left_controller_squeeze_value: {tv.left_controller_squeeze_value}")
                print(f"left_controller_thumbstick_state: {tv.left_controller_thumbstick_state}")
                print(f"left_controller_thumbstick_value: {tv.left_controller_thumbstick_value}")
                print(f"left_controller_aButton: {tv.left_controller_aButton}")
                print(f"left_controller_bButton: {tv.left_controller_bButton}")
                print(f"right_controller_trigger_state: {tv.right_controller_trigger_state}")
                print(f"right_controller_trigger_value: {tv.right_controller_trigger_value}")
                print(f"right_controller_squeeze_state: {tv.right_controller_squeeze_state}")
                print(f"right_controller_squeeze_value: {tv.right_controller_squeeze_value}")
                print(f"right_controller_thumbstick_state: {tv.right_controller_thumbstick_state}")
                print(f"right_controller_thumbstick_value: {tv.right_controller_thumbstick_value}")
                print(f"right_controller_aButton: {tv.right_controller_aButton}")
                print(f"right_controller_bButton: {tv.right_controller_bButton}")
            print("=" * 80)
            time.sleep(0.03)
    except KeyboardInterrupt:
        print("KeyboardInterrupt, exiting program...")
    finally:
        # close() the local mapping BEFORE unlink() (destroy) — the documented
        # teardown order for multiprocessing.shared_memory. The original did
        # the reverse, and also called exit(0) here, which would mask any
        # in-flight exception; normal return achieves the same exit status.
        image_shm.close()
        image_shm.unlink()
        print("Finally, exiting program...")


if __name__ == '__main__':
    run_test_television()
||||
@ -0,0 +1,80 @@ |
|||||
|
import os, sys |
||||
|
this_file = os.path.abspath(__file__) |
||||
|
project_root = os.path.abspath(os.path.join(os.path.dirname(this_file), '..')) |
||||
|
if project_root not in sys.path: |
||||
|
sys.path.insert(0, project_root) |
||||
|
|
||||
|
import numpy as np |
||||
|
import time |
||||
|
from multiprocessing import shared_memory |
||||
|
from open_television import TeleVisionWrapper |
||||
|
|
||||
|
def run_test_tv_wrapper():
    """Interactive smoke test for TeleVisionWrapper.

    Allocates a shared-memory image buffer, starts a TeleVisionWrapper, then
    prints a ~30 Hz stream of TeleData snapshots (poses plus hand-tracking or
    controller state) until interrupted with Ctrl-C.
    """
    # Binocular buffer: two 640x480 RGB views packed side by side.
    image_shape = (480, 640 * 2, 3)
    # int(...) because SharedMemory expects a plain int size.
    image_shm = shared_memory.SharedMemory(
        create=True, size=int(np.prod(image_shape)) * np.uint8().itemsize)

    use_hand_track = False
    tv_wrapper = TeleVisionWrapper(binocular=True, use_hand_tracking=use_hand_track,
                                   img_shape=image_shape, img_shm_name=image_shm.name,
                                   return_state_data=True, return_hand_rot_data=True)
    try:
        input("Press Enter to start tv_wrapper test...")
        while True:
            start_time = time.time()
            teleData = tv_wrapper.get_motion_state_data()

            print("=== TeleData Snapshot ===")
            print("[Head Rotation Matrix]:\n", teleData.head_rotation)
            print("[Left EE Pose]:\n", teleData.left_arm_pose)
            print("[Right EE Pose]:\n", teleData.right_arm_pose)

            if use_hand_track:
                print("[Left Hand Position] shape {}:\n{}".format(teleData.left_hand_pos.shape, teleData.left_hand_pos))
                print("[Right Hand Position] shape {}:\n{}".format(teleData.right_hand_pos.shape, teleData.right_hand_pos))
                if teleData.left_hand_rot is not None:
                    print("[Left Hand Rotation] shape {}:\n{}".format(teleData.left_hand_rot.shape, teleData.left_hand_rot))
                if teleData.right_hand_rot is not None:
                    print("[Right Hand Rotation] shape {}:\n{}".format(teleData.right_hand_rot.shape, teleData.right_hand_rot))
                if teleData.left_pinch_value is not None:
                    print("[Left Pinch Value]: {:.2f}".format(teleData.left_pinch_value))
                if teleData.right_pinch_value is not None:
                    print("[Right Pinch Value]: {:.2f}".format(teleData.right_pinch_value))
                if teleData.tele_state:
                    state = teleData.tele_state
                    print("[Hand State]:")
                    print(f"  Left Pinch state: {state.left_pinch_state}")
                    print(f"  Left Squeeze: {state.left_squeeze_state} ({state.left_squeeze_value:.2f})")
                    print(f"  Right Pinch state: {state.right_pinch_state}")
                    print(f"  Right Squeeze: {state.right_squeeze_state} ({state.right_squeeze_value:.2f})")
            else:
                print(f"[Left Trigger Value]: {teleData.left_trigger_value:.2f}")
                print(f"[Right Trigger Value]: {teleData.right_trigger_value:.2f}")
                if teleData.tele_state:
                    state = teleData.tele_state
                    print("[Controller State]:")
                    print(f"  Left Trigger: {state.left_trigger_state}")
                    print(f"  Left Squeeze: {state.left_squeeze_ctrl_state} ({state.left_squeeze_ctrl_value:.2f})")
                    print(f"  Left Thumbstick: {state.left_thumbstick_state} ({state.left_thumbstick_value})")
                    print(f"  Left A/B Buttons: A={state.left_aButton}, B={state.left_bButton}")
                    print(f"  Right Trigger: {state.right_trigger_state}")
                    print(f"  Right Squeeze: {state.right_squeeze_ctrl_state} ({state.right_squeeze_ctrl_value:.2f})")
                    print(f"  Right Thumbstick: {state.right_thumbstick_state} ({state.right_thumbstick_value})")
                    print(f"  Right A/B Buttons: A={state.right_aButton}, B={state.right_bButton}")

            # Pace the loop to roughly 30 Hz.
            time_elapsed = time.time() - start_time
            time.sleep(max(0, 0.033 - time_elapsed))

    except KeyboardInterrupt:
        print("KeyboardInterrupt, exiting program...")
    finally:
        # close() the local mapping BEFORE unlink() (destroy) — the documented
        # teardown order for multiprocessing.shared_memory. The original did
        # the reverse, and also called exit(0) here, which would mask any
        # in-flight exception; normal return achieves the same exit status.
        image_shm.close()
        image_shm.unlink()
        print("Finally, exiting program...")


if __name__ == '__main__':
    run_test_tv_wrapper()
||||
@ -1,53 +0,0 @@ |
|||||
import numpy as np

# Homogeneous (4x4) change-of-initial-pose transforms, applied on the right of
# a wrist pose: rotate the XR wrist frame into the Unitree URDF wrist frame.

# Left wrist: +90 deg about its own x-axis.
T_to_unitree_left_wrist = np.array([[1, 0, 0, 0],
                                    [0, 0, -1, 0],
                                    [0, 1, 0, 0],
                                    [0, 0, 0, 1]])

# Right wrist: -90 deg about its own x-axis.
T_to_unitree_right_wrist = np.array([[1, 0, 0, 0],
                                     [0, 0, 1, 0],
                                     [0, -1, 0, 0],
                                     [0, 0, 0, 1]])

# Hand-landmark frame: XR hand convention -> Unitree hand URDF convention.
T_to_unitree_hand = np.array([[0, 0, 1, 0],
                              [-1, 0, 0, 0],
                              [0, -1, 0, 0],
                              [0, 0, 0, 1]])

# Change of basis: OpenXR (y up, z back, x right) -> robot (z up, y left, x front).
T_robot_openxr = np.array([[0, 0, -1, 0],
                           [-1, 0, 0, 0],
                           [0, 1, 0, 0],
                           [0, 0, 0, 1]])

# Fallback poses used while no valid tracking data has arrived yet.
const_head_vuer_mat = np.array([[1, 0, 0, 0],
                                [0, 1, 0, 1.5],
                                [0, 0, 1, -0.2],
                                [0, 0, 0, 1]])

# For G1 initial position
const_right_wrist_vuer_mat = np.array([[1, 0, 0, 0.15],
                                       [0, 1, 0, 1.13],
                                       [0, 0, 1, -0.3],
                                       [0, 0, 0, 1]])

# For G1 initial position
const_left_wrist_vuer_mat = np.array([[1, 0, 0, -0.15],
                                      [0, 1, 0, 1.13],
                                      [0, 0, 1, -0.3],
                                      [0, 0, 0, 1]])
||||
@ -0,0 +1,20 @@ |
|||||
|
from setuptools import setup, find_packages

# Pinned runtime dependencies for the open_television package.
_RUNTIME_DEPS = [
    'numpy==1.23.0',
    'opencv_contrib_python==4.10.0.82',
    'opencv_python==4.9.0.80',
    'aiohttp==3.9.5',
    'aiohttp_cors==0.7.0',
    'aiortc==1.8.0',
    'av==11.0.0',
    'vuer==0.0.42rc16',
]

setup(
    name='open_television',
    version='0.0.1',
    description='XR vision and hand/controller interface for unitree robotics',
    author='silencht',
    packages=find_packages(),
    install_requires=_RUNTIME_DEPS,
    python_requires='>=3.8',
)
@ -1,147 +1,411 @@ |
|||||
import numpy as np |
import numpy as np |
||||
from teleop.open_television.television import TeleVision |
|
||||
from teleop.open_television.constants import * |
|
||||
from teleop.utils.mat_tool import mat_update, fast_mat_inv |
|
||||
|
from open_television.television import TeleVision |
||||
|
from dataclasses import dataclass, field |
||||
|
|
||||
""" |
""" |
||||
(basis) OpenXR Convention : y up, z back, x right. |
(basis) OpenXR Convention : y up, z back, x right. |
||||
(basis) Robot Convention : z up, y left, x front. |
(basis) Robot Convention : z up, y left, x front. |
||||
p.s. Vuer's all raw data follows OpenXR Convention, WORLD coordinate. |
|
||||
|
|
||||
under (basis) Robot Convention, wrist's initial pose convention: |
|
||||
|
under (basis) Robot Convention, humanoid arm's initial pose convention: |
||||
|
|
||||
# (Left Wrist) XR/AppleVisionPro Convention: |
|
||||
|
# (initial pose) OpenXR Left Arm Pose Convention (hand tracking): |
||||
- the x-axis pointing from wrist toward middle. |
- the x-axis pointing from wrist toward middle. |
||||
- the y-axis pointing from index toward pinky. |
- the y-axis pointing from index toward pinky. |
||||
- the z-axis pointing from palm toward back of the hand. |
- the z-axis pointing from palm toward back of the hand. |
||||
|
|
||||
# (Right Wrist) XR/AppleVisionPro Convention: |
|
||||
|
# (initial pose) OpenXR Right Arm Pose Convention (hand tracking): |
||||
- the x-axis pointing from wrist toward middle. |
- the x-axis pointing from wrist toward middle. |
||||
- the y-axis pointing from pinky toward index. |
- the y-axis pointing from pinky toward index. |
||||
- the z-axis pointing from palm toward back of the hand. |
- the z-axis pointing from palm toward back of the hand. |
||||
|
|
||||
# (Left Wrist URDF) Unitree Convention: |
|
||||
|
# (initial pose) Unitree Humanoid Left Arm URDF Convention: |
||||
- the x-axis pointing from wrist toward middle. |
- the x-axis pointing from wrist toward middle. |
||||
- the y-axis pointing from palm toward back of the hand. |
- the y-axis pointing from palm toward back of the hand. |
||||
- the z-axis pointing from pinky toward index. |
- the z-axis pointing from pinky toward index. |
||||
|
|
||||
# (Right Wrist URDF) Unitree Convention: |
|
||||
|
# (initial pose) Unitree Humanoid Right Arm URDF Convention: |
||||
- the x-axis pointing from wrist toward middle. |
- the x-axis pointing from wrist toward middle. |
||||
- the y-axis pointing from back of the hand toward palm. |
- the y-axis pointing from back of the hand toward palm. |
||||
- the z-axis pointing from pinky toward index. |
- the z-axis pointing from pinky toward index. |
||||
|
|
||||
under (basis) Robot Convention, hand's initial pose convention: |
|
||||
|
under (basis) Robot Convention, humanoid hand's initial pose convention: |
||||
|
|
||||
# (Left Hand) XR/AppleVisionPro Convention: |
|
||||
|
# (initial pose) OpenXR Left Hand Pose Convention (hand tracking): |
||||
- the x-axis pointing from wrist toward middle. |
- the x-axis pointing from wrist toward middle. |
||||
- the y-axis pointing from index toward pinky. |
- the y-axis pointing from index toward pinky. |
||||
- the z-axis pointing from palm toward back of the hand. |
- the z-axis pointing from palm toward back of the hand. |
||||
|
|
||||
# (Right Hand) XR/AppleVisionPro Convention: |
|
||||
|
# (initial pose) OpenXR Right Hand Pose Convention (hand tracking): |
||||
- the x-axis pointing from wrist toward middle. |
- the x-axis pointing from wrist toward middle. |
||||
- the y-axis pointing from pinky toward index. |
- the y-axis pointing from pinky toward index. |
||||
- the z-axis pointing from palm toward back of the hand. |
- the z-axis pointing from palm toward back of the hand. |
||||
|
|
||||
# (Left Hand URDF) Unitree Convention: |
|
||||
|
# (initial pose) Unitree Humanoid Left Hand URDF Convention: |
||||
- The x-axis pointing from palm toward back of the hand. |
- The x-axis pointing from palm toward back of the hand. |
||||
- The y-axis pointing from middle toward wrist. |
- The y-axis pointing from middle toward wrist. |
||||
- The z-axis pointing from pinky toward index. |
- The z-axis pointing from pinky toward index. |
||||
|
|
||||
# (Right Hand URDF) Unitree Convention: |
|
||||
|
# (initial pose) Unitree Humanoid Right Hand URDF Convention: |
||||
- The x-axis pointing from palm toward back of the hand. |
- The x-axis pointing from palm toward back of the hand. |
||||
- The y-axis pointing from middle toward wrist. |
- The y-axis pointing from middle toward wrist. |
||||
- The z-axis pointing from index toward pinky. |
- The z-axis pointing from index toward pinky. |
||||
|
|
||||
|
p.s. TeleVision (Vuer) obtains all raw data under the (basis) OpenXR Convention. |
||||
|
In addition, arm pose data (hand tracking) follows the (initial pose) OpenXR Arm Pose Convention, |
||||
|
while arm pose data (controller tracking) directly follows the (initial pose) Unitree Humanoid Arm URDF Convention (thus no transform is needed). |
||||
|
Meanwhile, all raw data is in the WORLD frame defined by XR device odometry. |
||||
|
|
||||
p.s. From website: https://registry.khronos.org/OpenXR/specs/1.1/man/html/openxr.html. |
|
||||
You can find **(Left/Right Wrist) XR/AppleVisionPro Convention** related information like this below: |
|
||||
"The wrist joint is located at the pivot point of the wrist, which is location invariant when twisting the hand without moving the forearm. |
|
||||
The backward (+Z) direction is parallel to the line from wrist joint to middle finger metacarpal joint, and points away from the finger tips. |
|
||||
The up (+Y) direction points out towards back of the hand and perpendicular to the skin at wrist. |
|
||||
The X direction is perpendicular to the Y and Z directions and follows the right hand rule." |
|
||||
Note: The above context is of course under **(basis) OpenXR Convention**. |
|
||||
|
p.s. From website: https://registry.khronos.org/OpenXR/specs/1.1/man/html/openxr.html. |
||||
|
You can find **(initial pose) OpenXR Left/Right Arm Pose Convention** related information like this below: |
||||
|
"The wrist joint is located at the pivot point of the wrist, which is location invariant when twisting the hand without moving the forearm. |
||||
|
The backward (+Z) direction is parallel to the line from wrist joint to middle finger metacarpal joint, and points away from the finger tips. |
||||
|
The up (+Y) direction points out towards back of the hand and perpendicular to the skin at wrist. |
||||
|
The X direction is perpendicular to the Y and Z directions and follows the right hand rule." |
||||
|
Note: The above context is of course under **(basis) OpenXR Convention**. |
||||
|
|
||||
p.s. **(Wrist/Hand URDF) Unitree Convention** information come from URDF files. |
|
||||
|
p.s. **Unitree Arm/Hand URDF initial pose Convention** information come from URDF files. |
||||
""" |
""" |
||||
|
|
||||
|
|
||||
|
def safe_mat_update(prev_mat, mat):
    """Validate a 4x4 pose matrix before accepting it.

    Returns (mat, True) when mat has a finite, non-zero determinant.
    Returns (prev_mat, False) when mat is singular or contains non-finite
    values, i.e. the tracking sample is invalid and the previous pose should
    be kept.  (The original comment stated the condition inverted.)
    """
    det = np.linalg.det(mat)
    if not np.isfinite(det) or np.isclose(det, 0.0, atol=1e-6):
        return prev_mat, False
    return mat, True
||||
|
|
||||
|
def fast_mat_inv(mat):
    """Invert a 4x4 rigid-body (SE(3)) transform without a general solve.

    Uses the closed form inv([R t; 0 1]) = [R^T, -R^T t; 0 1], which only
    requires a transpose and one matrix-vector product.
    """
    rot_t = mat[:3, :3].T
    inv = np.eye(4)
    inv[:3, :3] = rot_t
    inv[:3, 3] = -(rot_t @ mat[:3, 3])
    return inv
||||
|
|
||||
|
def safe_rot_update(prev_rot_array, rot_array):
    """Validate a stack of rotation matrices before accepting it.

    Returns (rot_array, True) when every matrix in the stack has a finite,
    non-zero determinant; otherwise returns (prev_rot_array, False) so the
    caller keeps the last known-good stack.
    """
    dets = np.linalg.det(rot_array)
    all_finite = np.all(np.isfinite(dets))
    any_singular = np.any(np.isclose(dets, 0.0, atol=1e-6))
    if all_finite and not any_singular:
        return rot_array, True
    return prev_rot_array, False
||||
|
|
||||
|
# --- Constant transforms and fallback poses -------------------------------
# Initial-pose corrections, right-multiplied onto an arm pose to rotate the
# OpenXR wrist frame into the Unitree humanoid URDF wrist frame.

# Left arm: +90 deg about its own x-axis.
T_TO_UNITREE_HUMANOID_LEFT_ARM = np.array([[1, 0, 0, 0],
                                           [0, 0, -1, 0],
                                           [0, 1, 0, 0],
                                           [0, 0, 0, 1]])

# Right arm: -90 deg about its own x-axis.
T_TO_UNITREE_HUMANOID_RIGHT_ARM = np.array([[1, 0, 0, 0],
                                            [0, 0, 1, 0],
                                            [0, -1, 0, 0],
                                            [0, 0, 0, 1]])

# Hand-landmark frame: OpenXR hand convention -> Unitree hand URDF convention.
T_TO_UNITREE_HAND = np.array([[0, 0, 1, 0],
                              [-1, 0, 0, 0],
                              [0, -1, 0, 0],
                              [0, 0, 0, 1]])

# Change of basis (homogeneous): OpenXR (y up, z back, x right) -> robot
# (z up, y left, x front), and its inverse.
T_ROBOT_OPENXR = np.array([[0, 0, -1, 0],
                           [-1, 0, 0, 0],
                           [0, 1, 0, 0],
                           [0, 0, 0, 1]])

T_OPENXR_ROBOT = np.array([[0, -1, 0, 0],
                           [0, 0, 1, 0],
                           [-1, 0, 0, 0],
                           [0, 0, 0, 1]])

# Same change of basis as pure 3x3 rotations (for rotation-only data).
R_ROBOT_OPENXR = np.array([[0, 0, -1],
                           [-1, 0, 0],
                           [0, 1, 0]])

R_OPENXR_ROBOT = np.array([[0, -1, 0],
                           [0, 0, 1],
                           [-1, 0, 0]])

# Fallback poses used while no valid tracking data has arrived yet.
CONST_HEAD_POSE = np.array([[1, 0, 0, 0],
                            [0, 1, 0, 1.5],
                            [0, 0, 1, -0.2],
                            [0, 0, 0, 1]])

# For Robot initial position
CONST_RIGHT_ARM_POSE = np.array([[1, 0, 0, 0.15],
                                 [0, 1, 0, 1.13],
                                 [0, 0, 1, -0.3],
                                 [0, 0, 0, 1]])

CONST_LEFT_ARM_POSE = np.array([[1, 0, 0, -0.15],
                                [0, 1, 0, 1.13],
                                [0, 0, 1, -0.3],
                                [0, 0, 0, 1]])

# Fallback hand-joint orientations: 25 identity rotations.
CONST_HAND_ROT = np.tile(np.eye(3)[None, :, :], (25, 1, 1))
||||
|
|
||||
|
@dataclass
class TeleStateData:
    """Discrete button/gesture state from the XR device.

    Hand-tracking fields are populated in hand-tracking mode; the
    controller fields are populated in controller-tracking mode.
    """

    # --- hand tracking ---
    left_pinch_state: bool = False        # True while index and thumb pinch
    left_squeeze_state: bool = False      # True while the hand makes a fist
    left_squeeze_value: float = 0.0       # fist closure, 0.0 (open) .. 1.0 (closed)
    right_pinch_state: bool = False       # True while index and thumb pinch
    right_squeeze_state: bool = False     # True while the hand makes a fist
    right_squeeze_value: float = 0.0      # fist closure, 0.0 (open) .. 1.0 (closed)

    # --- controller tracking ---
    left_trigger_state: bool = False      # True while the trigger is pressed
    left_squeeze_ctrl_state: bool = False # True while the grip button is pressed
    left_squeeze_ctrl_value: float = 0.0  # grip pull depth, 0.0 .. 1.0
    left_thumbstick_state: bool = False   # True while the thumbstick is clicked
    left_thumbstick_value: np.ndarray = field(default_factory=lambda: np.zeros(2))  # normalized (x, y)
    left_aButton: bool = False            # True while the A button is pressed
    left_bButton: bool = False            # True while the B button is pressed
    right_trigger_state: bool = False     # True while the trigger is pressed
    right_squeeze_ctrl_state: bool = False  # True while the grip button is pressed
    right_squeeze_ctrl_value: float = 0.0   # grip pull depth, 0.0 .. 1.0
    right_thumbstick_state: bool = False    # True while the thumbstick is clicked
    right_thumbstick_value: np.ndarray = field(default_factory=lambda: np.zeros(2))  # normalized (x, y)
    right_aButton: bool = False           # True while the A button is pressed
    right_bButton: bool = False           # True while the B button is pressed
||||
|
|
||||
|
@dataclass
class TeleData:
    """One snapshot of processed XR motion data, expressed in robot conventions.

    Hand-tracking-only fields default to None in controller mode and vice
    versa for the trigger values.
    """

    head_rotation: np.ndarray             # (3,3) head orientation matrix
    left_arm_pose: np.ndarray             # (4,4) SE(3) pose of the left arm
    right_arm_pose: np.ndarray            # (4,4) SE(3) pose of the right arm
    left_hand_pos: np.ndarray = None      # (25,3) left hand joint positions
    right_hand_pos: np.ndarray = None     # (25,3) right hand joint positions
    left_hand_rot: np.ndarray = None      # (25,3,3) left hand joint rotations
    right_hand_rot: np.ndarray = None     # (25,3,3) right hand joint rotations
    left_pinch_value: float = None        # pinch distance index<->thumb, 1.0 .. 0.0
    right_pinch_value: float = None       # pinch distance index<->thumb, 1.0 .. 0.0
    left_trigger_value: float = None      # trigger pull depth, 1.0 .. 0.0
    right_trigger_value: float = None     # trigger pull depth, 1.0 .. 0.0
    tele_state: TeleStateData = field(default_factory=TeleStateData)  # button/gesture state
||||
|
|
||||
|
|
||||
class TeleVisionWrapper: |
class TeleVisionWrapper: |
||||
def __init__(self, binocular, img_shape, img_shm_name): |
|
||||
self.tv = TeleVision(binocular, img_shape, img_shm_name) |
|
||||
|
|
||||
def get_data(self): |
|
||||
|
|
||||
# --------------------------------wrist------------------------------------- |
|
||||
|
|
||||
# TeleVision obtains a basis coordinate that is OpenXR Convention |
|
||||
head_vuer_mat, head_flag = mat_update(const_head_vuer_mat, self.tv.head_matrix.copy()) |
|
||||
left_wrist_vuer_mat, left_wrist_flag = mat_update(const_left_wrist_vuer_mat, self.tv.left_hand.copy()) |
|
||||
right_wrist_vuer_mat, right_wrist_flag = mat_update(const_right_wrist_vuer_mat, self.tv.right_hand.copy()) |
|
||||
|
|
||||
# Change basis convention: VuerMat ((basis) OpenXR Convention) to WristMat ((basis) Robot Convention) |
|
||||
# p.s. WristMat = T_{robot}_{openxr} * VuerMat * T_{robot}_{openxr}^T |
|
||||
# Reason for right multiply fast_mat_inv(T_robot_openxr): |
|
||||
# This is similarity transformation: B = PAP^{-1}, that is B ~ A |
|
||||
# For example: |
|
||||
# - For a pose data T_r under the Robot Convention, left-multiplying WristMat means: |
|
||||
# - WristMat * T_r ==> T_{robot}_{openxr} * VuerMat * T_{openxr}_{robot} * T_r |
|
||||
# - First, transform to the OpenXR Convention (The function of T_{openxr}_{robot}) |
|
||||
# - then, apply the rotation VuerMat in the OpenXR Convention (The function of VuerMat) |
|
||||
# - finally, transform back to the Robot Convention (The function of T_{robot}_{openxr}) |
|
||||
# This results in the same rotation effect under the Robot Convention as in the OpenXR Convention. |
|
||||
head_mat = T_robot_openxr @ head_vuer_mat @ fast_mat_inv(T_robot_openxr) |
|
||||
left_wrist_mat = T_robot_openxr @ left_wrist_vuer_mat @ fast_mat_inv(T_robot_openxr) |
|
||||
right_wrist_mat = T_robot_openxr @ right_wrist_vuer_mat @ fast_mat_inv(T_robot_openxr) |
|
||||
|
|
||||
# Change wrist convention: WristMat ((Left Wrist) XR/AppleVisionPro Convention) to UnitreeWristMat((Left Wrist URDF) Unitree Convention) |
|
||||
# Reason for right multiply (T_to_unitree_left_wrist) : Rotate 90 degrees counterclockwise about its own x-axis. |
|
||||
# Reason for right multiply (T_to_unitree_right_wrist): Rotate 90 degrees clockwise about its own x-axis. |
|
||||
unitree_left_wrist = left_wrist_mat @ (T_to_unitree_left_wrist if left_wrist_flag else np.eye(4)) |
|
||||
unitree_right_wrist = right_wrist_mat @ (T_to_unitree_right_wrist if right_wrist_flag else np.eye(4)) |
|
||||
|
|
||||
# Transfer from WORLD to HEAD coordinate (translation only). |
|
||||
unitree_left_wrist[0:3, 3] = unitree_left_wrist[0:3, 3] - head_mat[0:3, 3] |
|
||||
unitree_right_wrist[0:3, 3] = unitree_right_wrist[0:3, 3] - head_mat[0:3, 3] |
|
||||
|
|
||||
# --------------------------------hand------------------------------------- |
|
||||
|
|
||||
# Homogeneous, [xyz] to [xyz1] |
|
||||
# p.s. np.concatenate([25,3]^T,(1,25)) ==> hand_vuer_mat.shape is (4,25) |
|
||||
# Now under (basis) OpenXR Convention, mat shape like this: |
|
||||
# x0 x1 x2 ··· x23 x24 |
|
||||
# y0 y1 y1 ··· y23 y24 |
|
||||
# z0 z1 z2 ··· z23 z24 |
|
||||
# 1 1 1 ··· 1 1 |
|
||||
left_hand_vuer_mat = np.concatenate([self.tv.left_landmarks.copy().T, np.ones((1, self.tv.left_landmarks.shape[0]))]) |
|
||||
right_hand_vuer_mat = np.concatenate([self.tv.right_landmarks.copy().T, np.ones((1, self.tv.right_landmarks.shape[0]))]) |
|
||||
|
|
||||
# Change basis convention: from (basis) OpenXR Convention to (basis) Robot Convention |
|
||||
# Just a change of basis for 3D points. No rotation, only translation. No need to right-multiply fast_mat_inv(T_robot_openxr). |
|
||||
left_hand_mat = T_robot_openxr @ left_hand_vuer_mat |
|
||||
right_hand_mat = T_robot_openxr @ right_hand_vuer_mat |
|
||||
|
|
||||
# Transfer from WORLD to WRIST coordinate. (this process under (basis) Robot Convention) |
|
||||
# p.s. HandMat_WristBased = WristMat_{wrold}_{wrist}^T * HandMat_{wrold} |
|
||||
# HandMat_WristBased = WristMat_{wrist}_{wrold} * HandMat_{wrold}, that is HandMat_{wrist} |
|
||||
left_hand_mat_wb = fast_mat_inv(left_wrist_mat) @ left_hand_mat |
|
||||
right_hand_mat_wb = fast_mat_inv(right_wrist_mat) @ right_hand_mat |
|
||||
# Change hand convention: HandMat ((Left Hand) XR/AppleVisionPro Convention) to UnitreeHandMat((Left Hand URDF) Unitree Convention) |
|
||||
# Reason for left multiply : T_to_unitree_hand @ left_hand_mat_wb ==> (4,4) @ (4,25) ==> (4,25), (4,25)[0:3, :] ==> (3,25), (3,25).T ==> (25,3) |
|
||||
# Now under (Left Hand URDF) Unitree Convention, mat shape like this: |
|
||||
# [x0, y0, z0] |
|
||||
# [x1, y1, z1] |
|
||||
# ··· |
|
||||
# [x23,y23,z23] |
|
||||
# [x24,y24,z24] |
|
||||
unitree_left_hand = (T_to_unitree_hand @ left_hand_mat_wb)[0:3, :].T |
|
||||
unitree_right_hand = (T_to_unitree_hand @ right_hand_mat_wb)[0:3, :].T |
|
||||
|
|
||||
# --------------------------------offset------------------------------------- |
|
||||
|
|
||||
head_rmat = head_mat[:3, :3] |
|
||||
# The origin of the coordinate for IK Solve is the WAIST joint motor. You can use teleop/robot_control/robot_arm_ik.py Unit_Test to check it. |
|
||||
# The origin of the coordinate of unitree_left_wrist is HEAD. So it is necessary to translate the origin of unitree_left_wrist from HEAD to WAIST. |
|
||||
unitree_left_wrist[0, 3] +=0.15 |
|
||||
unitree_right_wrist[0,3] +=0.15 |
|
||||
unitree_left_wrist[2, 3] +=0.45 |
|
||||
unitree_right_wrist[2,3] +=0.45 |
|
||||
|
|
||||
return head_rmat, unitree_left_wrist, unitree_right_wrist, unitree_left_hand, unitree_right_hand |
|
||||
|
def __init__(self, binocular: bool, use_hand_tracking: bool, img_shape, img_shm_name, return_state_data: bool = True, return_hand_rot_data: bool = False, |
||||
|
cert_file = None, key_file = None): |
||||
|
""" |
||||
|
TeleVisionWrapper is a wrapper for the TeleVision class, which handles XR device's data suit for robot control. |
||||
|
It initializes the TeleVision instance with the specified parameters and provides a method to get motion state data. |
||||
|
|
||||
|
:param binocular: A boolean indicating whether the head camera device is binocular or not. |
||||
|
:param use_hand_tracking: A boolean indicating whether to use hand tracking or use controller tracking. |
||||
|
:param img_shape: The shape of the image to be processed. |
||||
|
:param img_shm_name: The name of the shared memory for the image. |
||||
|
:param return_state: A boolean indicating whether to return the state of the hand or controller. |
||||
|
:param return_hand_rot: A boolean indicating whether to return the hand rotation data. |
||||
|
:param cert_file: The path to the certificate file for secure connection. |
||||
|
:param key_file: The path to the key file for secure connection. |
||||
|
""" |
||||
|
self.use_hand_tracking = use_hand_tracking |
||||
|
self.return_state_data = return_state_data |
||||
|
self.return_hand_rot_data = return_hand_rot_data |
||||
|
self.tvuer = TeleVision(binocular, use_hand_tracking, img_shape, img_shm_name, cert_file=cert_file, key_file=key_file) |
||||
|
|
||||
|
def get_motion_state_data(self):
    """
    Get processed motion state data from the TeleVision instance.

    All returned data are transformed from the OpenXR Convention to the
    (Robot & Unitree) Convention.

    :return: A ``TeleData`` instance. With hand tracking it carries the head
             rotation, both wrist poses (waist frame), hand keypoint positions
             (arm frame, (25, 3) each), optional per-keypoint rotations and
             pinch values; with controller tracking it carries the head
             rotation, both wrist poses and trigger values. ``tele_state`` is
             a ``TeleStateData`` or ``None`` depending on ``return_state_data``.
    """
    # Variable Naming Convention below,
    # ┌────────────┬───────────────────────────┬──────────────────────────┬──────────────────────────────────┬───────────────────────────────────┐
    # │left / right│ Bxr                       │ Brobot                   │ IPxr                             │ IPunitree                         │
    # │────────────│───────────────────────────│──────────────────────────│──────────────────────────────────│───────────────────────────────────│
    # │    side    │ (basis) OpenXR Convention │ (basis) Robot Convention │ (initial pose) OpenXR Convention │ (initial pose) Unitree Convention │
    # └────────────┴───────────────────────────┴──────────────────────────┴──────────────────────────────────┴───────────────────────────────────┘
    # ┌────────────────────────────┬───────────────────┐
    # │ world / arm / head / waist │ arm / head / hand │
    # │────────────────────────────│───────────────────│
    # │        source frame        │    target frame   │
    # └────────────────────────────┴───────────────────┘

    # TeleVision (Vuer) obtains all raw data under the (basis) OpenXR Convention.
    Bxr_world_head, head_pose_is_valid = safe_mat_update(CONST_HEAD_POSE, self.tvuer.head_pose)

    if self.use_hand_tracking:
        # 'Arm' pose data follows (basis) OpenXR Convention and (initial pose) OpenXR Arm Convention.
        left_IPxr_Bxr_world_arm, left_arm_is_valid = safe_mat_update(CONST_LEFT_ARM_POSE, self.tvuer.left_arm_pose)
        right_IPxr_Bxr_world_arm, right_arm_is_valid = safe_mat_update(CONST_RIGHT_ARM_POSE, self.tvuer.right_arm_pose)

        # Change basis convention
        # From (basis) OpenXR Convention to (basis) Robot Convention:
        #   Brobot_Pose = T_{robot}_{openxr} * Bxr_Pose * T_{openxr}_{robot}
        # (with T_OPENXR_ROBOT = fast_mat_inv(T_ROBOT_OPENXR)).
        # This is a similarity transformation B = PAP^{-1}, i.e. B ~ A.
        # For a pose T_r under the Robot Convention, Brobot_Pose * T_r means:
        #   - first transform T_r into the OpenXR basis (the T_{openxr}_{robot} factor),
        #   - then apply the OpenXR-basis pose,
        #   - finally transform back to the Robot basis (the T_{robot}_{openxr} factor),
        # which yields the same rotation effect expressed under the Robot Convention.
        Brobot_world_head = T_ROBOT_OPENXR @ Bxr_world_head @ T_OPENXR_ROBOT
        left_IPxr_Brobot_world_arm = T_ROBOT_OPENXR @ left_IPxr_Bxr_world_arm @ T_OPENXR_ROBOT
        right_IPxr_Brobot_world_arm = T_ROBOT_OPENXR @ right_IPxr_Bxr_world_arm @ T_OPENXR_ROBOT

        # Change initial pose convention
        # From (initial pose) OpenXR Arm Convention to (initial pose) Unitree Humanoid Arm URDF Convention.
        # Right-multiplying rotates the wrist about its own x-axis:
        #   T_TO_UNITREE_HUMANOID_LEFT_ARM : 90 degrees counterclockwise,
        #   T_TO_UNITREE_HUMANOID_RIGHT_ARM: 90 degrees clockwise.
        left_IPunitree_Brobot_world_arm = left_IPxr_Brobot_world_arm @ (T_TO_UNITREE_HUMANOID_LEFT_ARM if left_arm_is_valid else np.eye(4))
        right_IPunitree_Brobot_world_arm = right_IPxr_Brobot_world_arm @ (T_TO_UNITREE_HUMANOID_RIGHT_ARM if right_arm_is_valid else np.eye(4))

        # Transfer from WORLD to HEAD coordinate (translation adjustment only)
        left_IPunitree_Brobot_head_arm = left_IPunitree_Brobot_world_arm.copy()
        right_IPunitree_Brobot_head_arm = right_IPunitree_Brobot_world_arm.copy()
        left_IPunitree_Brobot_head_arm[0:3, 3] = left_IPunitree_Brobot_head_arm[0:3, 3] - Brobot_world_head[0:3, 3]
        right_IPunitree_Brobot_head_arm[0:3, 3] = right_IPunitree_Brobot_head_arm[0:3, 3] - Brobot_world_head[0:3, 3]

        # =====coordinate origin offset=====
        Brobot_world_head_rot = Brobot_world_head[:3, :3]
        # The origin of the coordinate for IK Solve is near the WAIST joint motor.
        # You can use teleop/robot_control/robot_arm_ik.py Unit_Test to visualize it.
        # The origin of the coordinate of IPunitree_Brobot_head_arm is HEAD.
        # So it is necessary to translate the origin of IPunitree_Brobot_head_arm from HEAD to WAIST.
        left_IPunitree_Brobot_waist_arm = left_IPunitree_Brobot_head_arm.copy()
        right_IPunitree_Brobot_waist_arm = right_IPunitree_Brobot_head_arm.copy()
        left_IPunitree_Brobot_waist_arm[0, 3] += 0.15   # x
        right_IPunitree_Brobot_waist_arm[0, 3] += 0.15
        left_IPunitree_Brobot_waist_arm[2, 3] += 0.45   # z
        right_IPunitree_Brobot_waist_arm[2, 3] += 0.45

        # -----------------------------------hand position----------------------------------------
        if left_arm_is_valid and right_arm_is_valid:
            # Homogeneous, [xyz] to [xyz1]
            # np.concatenate([25,3]^T,(1,25)) ==> Bxr_world_hand_pos.shape is (4,25)
            # Now under (basis) OpenXR Convention, Bxr_world_hand_pos data like this:
            # [x0 x1 x2 ··· x23 x24]
            # [y0 y1 y2 ··· y23 y24]
            # [z0 z1 z2 ··· z23 z24]
            # [ 1  1  1 ···   1   1]
            left_IPxr_Bxr_world_hand_pos = np.concatenate([self.tvuer.left_hand_positions.T, np.ones((1, self.tvuer.left_hand_positions.shape[0]))])
            right_IPxr_Bxr_world_hand_pos = np.concatenate([self.tvuer.right_hand_positions.T, np.ones((1, self.tvuer.right_hand_positions.shape[0]))])

            # Change basis convention
            # From (basis) OpenXR Convention to (basis) Robot Convention.
            # Just a change of basis for 3D points. No rotation, only translation,
            # so there is no need to right-multiply fast_mat_inv(T_ROBOT_OPENXR).
            left_IPxr_Brobot_world_hand_pos = T_ROBOT_OPENXR @ left_IPxr_Bxr_world_hand_pos
            right_IPxr_Brobot_world_hand_pos = T_ROBOT_OPENXR @ right_IPxr_Bxr_world_hand_pos

            # Transfer from WORLD to ARM frame under (basis) Robot Convention:
            # inv(Brobot_{world}_{arm}) @ Brobot_{world}_pos ==> Brobot_arm_hand_pos,
            # i.e. the keypoints expressed in the arm (wrist) frame.
            left_IPxr_Brobot_arm_hand_pos = fast_mat_inv(left_IPxr_Brobot_world_arm) @ left_IPxr_Brobot_world_hand_pos
            right_IPxr_Brobot_arm_hand_pos = fast_mat_inv(right_IPxr_Brobot_world_arm) @ right_IPxr_Brobot_world_hand_pos

            # Change initial pose convention
            # From (initial pose) XR Hand Convention to (initial pose) Unitree Humanoid Hand URDF Convention:
            # T_TO_UNITREE_HAND @ IPxr_Brobot_arm_hand_pos ==> IPunitree_Brobot_arm_hand_pos
            # ((4,4) @ (4,25))[0:3, :].T ==> (3,25).T ==> (25,3), one [x, y, z] row per keypoint:
            # [x0,  y0,  z0 ]
            # [x1,  y1,  z1 ]
            #  ···
            # [x24, y24, z24]
            left_IPunitree_Brobot_arm_hand_pos = (T_TO_UNITREE_HAND @ left_IPxr_Brobot_arm_hand_pos)[0:3, :].T
            right_IPunitree_Brobot_arm_hand_pos = (T_TO_UNITREE_HAND @ right_IPxr_Brobot_arm_hand_pos)[0:3, :].T
        else:
            # No valid wrist pose yet: emit zeroed keypoints with the expected (25, 3) shape.
            left_IPunitree_Brobot_arm_hand_pos = np.zeros((25, 3))
            right_IPunitree_Brobot_arm_hand_pos = np.zeros((25, 3))

        # -----------------------------------hand rotation----------------------------------------
        if self.return_hand_rot_data:
            left_Bxr_world_hand_rot, left_hand_rot_is_valid = safe_rot_update(CONST_HAND_ROT, self.tvuer.left_hand_orientations)  # [25, 3, 3]
            right_Bxr_world_hand_rot, right_hand_rot_is_valid = safe_rot_update(CONST_HAND_ROT, self.tvuer.right_hand_orientations)

            if left_hand_rot_is_valid and right_hand_rot_is_valid:
                # Express every keypoint rotation relative to the arm (wrist) frame.
                left_Bxr_arm_hand_rot = np.einsum('ij,njk->nik', left_IPxr_Bxr_world_arm[:3, :3].T, left_Bxr_world_hand_rot)
                right_Bxr_arm_hand_rot = np.einsum('ij,njk->nik', right_IPxr_Bxr_world_arm[:3, :3].T, right_Bxr_world_hand_rot)
                # Change basis convention (per-keypoint similarity transform).
                left_Brobot_arm_hand_rot = np.einsum('ij,njk,kl->nil', R_ROBOT_OPENXR, left_Bxr_arm_hand_rot, R_OPENXR_ROBOT)
                right_Brobot_arm_hand_rot = np.einsum('ij,njk,kl->nil', R_ROBOT_OPENXR, right_Bxr_arm_hand_rot, R_OPENXR_ROBOT)
            else:
                left_Brobot_arm_hand_rot = left_Bxr_world_hand_rot
                right_Brobot_arm_hand_rot = right_Bxr_world_hand_rot
        else:
            left_Brobot_arm_hand_rot = None
            right_Brobot_arm_hand_rot = None

        if self.return_state_data:
            hand_state = TeleStateData(
                left_pinch_state=self.tvuer.left_hand_pinch_state,
                left_squeeze_state=self.tvuer.left_hand_squeeze_state,
                left_squeeze_value=self.tvuer.left_hand_squeeze_value,
                right_pinch_state=self.tvuer.right_hand_pinch_state,
                right_squeeze_state=self.tvuer.right_hand_squeeze_state,
                right_squeeze_value=self.tvuer.right_hand_squeeze_value,
            )
        else:
            hand_state = None

        return TeleData(
            head_rotation=Brobot_world_head_rot,
            left_arm_pose=left_IPunitree_Brobot_waist_arm,
            right_arm_pose=right_IPunitree_Brobot_waist_arm,
            left_hand_pos=left_IPunitree_Brobot_arm_hand_pos,
            right_hand_pos=right_IPunitree_Brobot_arm_hand_pos,
            left_hand_rot=left_Brobot_arm_hand_rot,
            right_hand_rot=right_Brobot_arm_hand_rot,
            left_pinch_value=self.tvuer.left_hand_pinch_value * 100.0,
            right_pinch_value=self.tvuer.right_hand_pinch_value * 100.0,
            tele_state=hand_state
        )
    else:
        # Controller pose data directly follows the (initial pose) Unitree Humanoid Arm URDF
        # Convention (thus no initial-pose transform is needed).
        left_IPunitree_Bxr_world_arm, left_arm_is_valid = safe_mat_update(CONST_LEFT_ARM_POSE, self.tvuer.left_arm_pose)
        right_IPunitree_Bxr_world_arm, right_arm_is_valid = safe_mat_update(CONST_RIGHT_ARM_POSE, self.tvuer.right_arm_pose)

        # Change basis convention
        Brobot_world_head = T_ROBOT_OPENXR @ Bxr_world_head @ T_OPENXR_ROBOT
        left_IPunitree_Brobot_world_arm = T_ROBOT_OPENXR @ left_IPunitree_Bxr_world_arm @ T_OPENXR_ROBOT
        right_IPunitree_Brobot_world_arm = T_ROBOT_OPENXR @ right_IPunitree_Bxr_world_arm @ T_OPENXR_ROBOT

        # Transfer from WORLD to HEAD coordinate (translation adjustment only)
        left_IPunitree_Brobot_head_arm = left_IPunitree_Brobot_world_arm.copy()
        right_IPunitree_Brobot_head_arm = right_IPunitree_Brobot_world_arm.copy()
        left_IPunitree_Brobot_head_arm[0:3, 3] = left_IPunitree_Brobot_head_arm[0:3, 3] - Brobot_world_head[0:3, 3]
        right_IPunitree_Brobot_head_arm[0:3, 3] = right_IPunitree_Brobot_head_arm[0:3, 3] - Brobot_world_head[0:3, 3]

        # =====coordinate origin offset=====
        Brobot_world_head_rot = Brobot_world_head[:3, :3]
        # The origin of the coordinate for IK Solve is near the WAIST joint motor.
        # You can use teleop/robot_control/robot_arm_ik.py Unit_Test to check it.
        # The origin of the coordinate of IPunitree_Brobot_head_arm is HEAD.
        # So it is necessary to translate the origin of IPunitree_Brobot_head_arm from HEAD to WAIST.
        left_IPunitree_Brobot_waist_arm = left_IPunitree_Brobot_head_arm.copy()
        right_IPunitree_Brobot_waist_arm = right_IPunitree_Brobot_head_arm.copy()
        left_IPunitree_Brobot_waist_arm[0, 3] += 0.15   # x
        right_IPunitree_Brobot_waist_arm[0, 3] += 0.15
        left_IPunitree_Brobot_waist_arm[2, 3] += 0.45   # z
        right_IPunitree_Brobot_waist_arm[2, 3] += 0.45

        # return data
        if self.return_state_data:
            controller_state = TeleStateData(
                left_trigger_state=self.tvuer.left_controller_trigger_state,
                left_squeeze_ctrl_state=self.tvuer.left_controller_squeeze_state,
                left_squeeze_ctrl_value=self.tvuer.left_controller_squeeze_value,
                left_thumbstick_state=self.tvuer.left_controller_thumbstick_state,
                left_thumbstick_value=self.tvuer.left_controller_thumbstick_value,
                left_aButton=self.tvuer.left_controller_aButton,
                left_bButton=self.tvuer.left_controller_bButton,
                right_trigger_state=self.tvuer.right_controller_trigger_state,
                right_squeeze_ctrl_state=self.tvuer.right_controller_squeeze_state,
                right_squeeze_ctrl_value=self.tvuer.right_controller_squeeze_value,
                right_thumbstick_state=self.tvuer.right_controller_thumbstick_state,
                right_thumbstick_value=self.tvuer.right_controller_thumbstick_value,
                right_aButton=self.tvuer.right_controller_aButton,
                right_bButton=self.tvuer.right_controller_bButton,
            )
        else:
            controller_state = None

        return TeleData(
            head_rotation=Brobot_world_head_rot,
            left_arm_pose=left_IPunitree_Brobot_waist_arm,
            right_arm_pose=right_IPunitree_Brobot_waist_arm,
            left_trigger_value=10.0 - self.tvuer.left_controller_trigger_value * 10,
            right_trigger_value=10.0 - self.tvuer.right_controller_trigger_value * 10,
            tele_state=controller_state
        )
||||
@ -1,14 +0,0 @@ |
|||||
import numpy as np |
|
||||
|
|
||||
def mat_update(prev_mat, mat):
    """
    Validate a freshly received pose matrix, falling back to the previous one.

    A singular matrix (determinant == 0, e.g. an all-zero placeholder sent
    before tracking has started) is treated as invalid.

    :param prev_mat: Last known-good homogeneous pose matrix.
    :param mat: Newly received homogeneous pose matrix.
    :return: ``(mat, True)`` when the new matrix is non-singular,
             otherwise ``(prev_mat, False)``.
    """
    if np.linalg.det(mat) == 0:
        # New matrix is singular (invalid): keep the previous pose.
        return prev_mat, False
    else:
        return mat, True
|
||||
|
|
||||
|
|
||||
def fast_mat_inv(mat):
    """
    Invert a 4x4 rigid-body transform (rotation + translation) cheaply.

    Exploits the SE(3) structure: inv([R t; 0 1]) = [R^T, -R^T t; 0, 1],
    so no general matrix inversion is required. Assumes the upper-left 3x3
    block is orthonormal (pure rotation, no scale or shear).
    """
    rot_t = mat[:3, :3].T
    inv = np.eye(4)
    inv[:3, :3] = rot_t
    inv[:3, 3] = -rot_t @ mat[:3, 3]
    return inv
|
||||
Write
Preview
Loading…
Cancel
Save
Reference in new issue