diff --git a/teleop/image_server/image_client.py b/teleop/image_server/image_client.py index 3806f4b..1ff75dd 100644 --- a/teleop/image_server/image_client.py +++ b/teleop/image_server/image_client.py @@ -5,6 +5,8 @@ import time import struct from collections import deque from multiprocessing import shared_memory +import logging_mp +logger_mp = logging_mp.get_logger(__name__) class ImageClient: def __init__(self, tv_img_shape = None, tv_img_shm_name = None, wrist_img_shape = None, wrist_img_shm_name = None, @@ -85,10 +87,10 @@ class ImageClient: if frame_id != expected_frame_id: lost = frame_id - expected_frame_id if lost < 0: - print(f"[Image Client] Received out-of-order frame ID: {frame_id}") + logger_mp.info(f"[Image Client] Received out-of-order frame ID: {frame_id}") else: self._lost_frames += lost - print(f"[Image Client] Detected lost frames: {lost}, Expected frame ID: {expected_frame_id}, Received frame ID: {frame_id}") + logger_mp.warning(f"[Image Client] Detected lost frames: {lost}, Expected frame ID: {expected_frame_id}, Received frame ID: {frame_id}") self._last_frame_id = frame_id self._total_frames = frame_id + 1 @@ -111,7 +113,7 @@ class ImageClient: # Calculate lost frame rate lost_frame_rate = (self._lost_frames / self._total_frames) * 100 if self._total_frames > 0 else 0 - print(f"[Image Client] Real-time FPS: {real_time_fps:.2f}, Avg Latency: {avg_latency*1000:.2f} ms, Max Latency: {max_latency*1000:.2f} ms, \ + logger_mp.info(f"[Image Client] Real-time FPS: {real_time_fps:.2f}, Avg Latency: {avg_latency*1000:.2f} ms, Max Latency: {max_latency*1000:.2f} ms, \ Min Latency: {min_latency*1000:.2f} ms, Jitter: {jitter*1000:.2f} ms, Lost Frame Rate: {lost_frame_rate:.2f}%") def _close(self): @@ -119,7 +121,7 @@ class ImageClient: self._context.term() if self._image_show: cv2.destroyAllWindows() - print("Image client has been closed.") + logger_mp.info("Image client has been closed.") def receive_process(self): @@ -129,7 +131,7 @@ class ImageClient: 
self._socket.connect(f"tcp://{self._server_address}:{self._port}") self._socket.setsockopt_string(zmq.SUBSCRIBE, "") - print("\nImage client has started, waiting to receive data...") + logger_mp.info("\nImage client has started, waiting to receive data...") try: while self.running: # Receive message @@ -144,7 +146,7 @@ class ImageClient: jpg_bytes = message[header_size:] timestamp, frame_id = struct.unpack('dI', header) except struct.error as e: - print(f"[Image Client] Error unpacking header: {e}, discarding message.") + logger_mp.warning(f"[Image Client] Error unpacking header: {e}, discarding message.") continue else: # No header, entire message is image data @@ -153,7 +155,7 @@ class ImageClient: np_img = np.frombuffer(jpg_bytes, dtype=np.uint8) current_image = cv2.imdecode(np_img, cv2.IMREAD_COLOR) if current_image is None: - print("[Image Client] Failed to decode image.") + logger_mp.warning("[Image Client] Failed to decode image.") continue if self.tv_enable_shm: @@ -174,9 +176,9 @@ class ImageClient: self._print_performance_metrics(receive_time) except KeyboardInterrupt: - print("Image client interrupted by user.") + logger_mp.info("Image client interrupted by user.") except Exception as e: - print(f"[Image Client] An error occurred while receiving data: {e}") + logger_mp.warning(f"[Image Client] An error occurred while receiving data: {e}") finally: self._close() diff --git a/teleop/image_server/image_server.py b/teleop/image_server/image_server.py index fa542ff..19799fb 100644 --- a/teleop/image_server/image_server.py +++ b/teleop/image_server/image_server.py @@ -5,6 +5,8 @@ import struct from collections import deque import numpy as np import pyrealsense2 as rs +import logging_mp +logger_mp = logging_mp.get_logger(__name__, level=logging_mp.DEBUG) class RealSenseCamera(object): @@ -37,7 +39,7 @@ class RealSenseCamera(object): profile = self.pipeline.start(config) self._device = profile.get_device() if self._device is None: - print('[Image Server] 
pipe_profile.get_device() is None .') + logger_mp.error('[Image Server] pipe_profile.get_device() is None .') if self.enable_depth: assert self._device is not None depth_sensor = self._device.first_depth_sensor() @@ -82,7 +84,7 @@ class OpenCVCamera(): # Test if the camera can read frames if not self._can_read_frame(): - print(f"[Image Server] Camera {self.id} Error: Failed to initialize the camera or read frames. Exiting...") + logger_mp.error(f"[Image Server] Camera {self.id} Error: Failed to initialize the camera or read frames. Exiting...") self.release() def _can_read_frame(self): @@ -136,7 +138,7 @@ class ImageServer: #'wrist_camera_id_numbers': ["218622271789", "241222076627"], # serial number (realsense) } """ - print(config) + logger_mp.info(config) self.fps = config.get('fps', 30) self.head_camera_type = config.get('head_camera_type', 'opencv') self.head_image_shape = config.get('head_camera_image_shape', [480, 640]) # (height, width) @@ -161,7 +163,7 @@ class ImageServer: camera = RealSenseCamera(img_shape=self.head_image_shape, fps=self.fps, serial_number=serial_number) self.head_cameras.append(camera) else: - print(f"[Image Server] Unsupported head_camera_type: {self.head_camera_type}") + logger_mp.warning(f"[Image Server] Unsupported head_camera_type: {self.head_camera_type}") # Initialize wrist cameras if provided self.wrist_cameras = [] @@ -175,7 +177,7 @@ class ImageServer: camera = RealSenseCamera(img_shape=self.wrist_image_shape, fps=self.fps, serial_number=serial_number) self.wrist_cameras.append(camera) else: - print(f"[Image Server] Unsupported wrist_camera_type: {self.wrist_camera_type}") + logger_mp.warning(f"[Image Server] Unsupported wrist_camera_type: {self.wrist_camera_type}") # Set ZeroMQ context and socket self.context = zmq.Context() @@ -187,21 +189,21 @@ class ImageServer: for cam in self.head_cameras: if isinstance(cam, OpenCVCamera): - print(f"[Image Server] Head camera {cam.id} resolution: {cam.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)} 
x {cam.cap.get(cv2.CAP_PROP_FRAME_WIDTH)}") + logger_mp.info(f"[Image Server] Head camera {cam.id} resolution: {cam.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)} x {cam.cap.get(cv2.CAP_PROP_FRAME_WIDTH)}") elif isinstance(cam, RealSenseCamera): - print(f"[Image Server] Head camera {cam.serial_number} resolution: {cam.img_shape[0]} x {cam.img_shape[1]}") + logger_mp.info(f"[Image Server] Head camera {cam.serial_number} resolution: {cam.img_shape[0]} x {cam.img_shape[1]}") else: - print("[Image Server] Unknown camera type in head_cameras.") + logger_mp.warning("[Image Server] Unknown camera type in head_cameras.") for cam in self.wrist_cameras: if isinstance(cam, OpenCVCamera): - print(f"[Image Server] Wrist camera {cam.id} resolution: {cam.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)} x {cam.cap.get(cv2.CAP_PROP_FRAME_WIDTH)}") + logger_mp.info(f"[Image Server] Wrist camera {cam.id} resolution: {cam.cap.get(cv2.CAP_PROP_FRAME_HEIGHT)} x {cam.cap.get(cv2.CAP_PROP_FRAME_WIDTH)}") elif isinstance(cam, RealSenseCamera): - print(f"[Image Server] Wrist camera {cam.serial_number} resolution: {cam.img_shape[0]} x {cam.img_shape[1]}") + logger_mp.info(f"[Image Server] Wrist camera {cam.serial_number} resolution: {cam.img_shape[0]} x {cam.img_shape[1]}") else: - print("[Image Server] Unknown camera type in wrist_cameras.") + logger_mp.warning("[Image Server] Unknown camera type in wrist_cameras.") - print("[Image Server] Image server has started, waiting for client connections...") + logger_mp.info("[Image Server] Image server has started, waiting for client connections...") @@ -224,7 +226,7 @@ class ImageServer: if self.frame_count % 30 == 0: elapsed_time = current_time - self.start_time real_time_fps = len(self.frame_times) / self.time_window - print(f"[Image Server] Real-time FPS: {real_time_fps:.2f}, Total frames sent: {self.frame_count}, Elapsed time: {elapsed_time:.2f} sec") + logger_mp.info(f"[Image Server] Real-time FPS: {real_time_fps:.2f}, Total frames sent: {self.frame_count}, Elapsed 
time: {elapsed_time:.2f} sec") def _close(self): for cam in self.head_cameras: @@ -233,7 +235,7 @@ class ImageServer: cam.release() self.socket.close() self.context.term() - print("[Image Server] The server has been closed.") + logger_mp.info("[Image Server] The server has been closed.") def send_process(self): try: @@ -243,12 +245,12 @@ class ImageServer: if self.head_camera_type == 'opencv': color_image = cam.get_frame() if color_image is None: - print("[Image Server] Head camera frame read is error.") + logger_mp.error("[Image Server] Head camera frame read is error.") break elif self.head_camera_type == 'realsense': color_image, depth_iamge = cam.get_frame() if color_image is None: - print("[Image Server] Head camera frame read is error.") + logger_mp.error("[Image Server] Head camera frame read is error.") break head_frames.append(color_image) if len(head_frames) != len(self.head_cameras): @@ -261,12 +263,12 @@ class ImageServer: if self.wrist_camera_type == 'opencv': color_image = cam.get_frame() if color_image is None: - print("[Image Server] Wrist camera frame read is error.") + logger_mp.error("[Image Server] Wrist camera frame read is error.") break elif self.wrist_camera_type == 'realsense': color_image, depth_iamge = cam.get_frame() if color_image is None: - print("[Image Server] Wrist camera frame read is error.") + logger_mp.error("[Image Server] Wrist camera frame read is error.") break wrist_frames.append(color_image) wrist_color = cv2.hconcat(wrist_frames) @@ -278,7 +280,7 @@ class ImageServer: ret, buffer = cv2.imencode('.jpg', full_color) if not ret: - print("[Image Server] Frame imencode is failed.") + logger_mp.error("[Image Server] Frame imencode is failed.") continue jpg_bytes = buffer.tobytes() @@ -299,7 +301,7 @@ class ImageServer: self._print_performance_metrics(current_time) except KeyboardInterrupt: - print("[Image Server] Interrupted by user.") + logger_mp.warning("[Image Server] Interrupted by user.") finally: self._close() diff --git 
a/teleop/open_television/_test_television.py b/teleop/open_television/_test_television.py index 79e1902..ccb2005 100644 --- a/teleop/open_television/_test_television.py +++ b/teleop/open_television/_test_television.py @@ -8,6 +8,8 @@ import time import numpy as np from multiprocessing import shared_memory from open_television import TeleVision +import logging_mp +logger_mp = logging_mp.get_logger(__name__, level=logging_mp.INFO) def run_test_television(): # image @@ -30,54 +32,54 @@ def run_test_television(): input("Press Enter to start television test...") running = True while running: - print("=" * 80) - print("Common Data (always available):") - print(f"head_pose shape: {tv.head_pose.shape}\n{tv.head_pose}\n") - print(f"left_arm_pose shape: {tv.left_arm_pose.shape}\n{tv.left_arm_pose}\n") - print(f"right_arm_pose shape: {tv.right_arm_pose.shape}\n{tv.right_arm_pose}\n") - print("=" * 80) + logger_mp.info("=" * 80) + logger_mp.info("Common Data (always available):") + logger_mp.info(f"head_pose shape: {tv.head_pose.shape}\n{tv.head_pose}\n") + logger_mp.info(f"left_arm_pose shape: {tv.left_arm_pose.shape}\n{tv.left_arm_pose}\n") + logger_mp.info(f"right_arm_pose shape: {tv.right_arm_pose.shape}\n{tv.right_arm_pose}\n") + logger_mp.info("=" * 80) if use_hand_track: - print("Hand Tracking Data:") - print(f"left_hand_positions shape: {tv.left_hand_positions.shape}\n{tv.left_hand_positions}\n") - print(f"right_hand_positions shape: {tv.right_hand_positions.shape}\n{tv.right_hand_positions}\n") - print(f"left_hand_orientations shape: {tv.left_hand_orientations.shape}\n{tv.left_hand_orientations}\n") - print(f"right_hand_orientations shape: {tv.right_hand_orientations.shape}\n{tv.right_hand_orientations}\n") - print(f"left_hand_pinch_state: {tv.left_hand_pinch_state}") - print(f"left_hand_pinch_value: {tv.left_hand_pinch_value}") - print(f"left_hand_squeeze_state: {tv.left_hand_squeeze_state}") - print(f"left_hand_squeeze_value: {tv.left_hand_squeeze_value}") - 
print(f"right_hand_pinch_state: {tv.right_hand_pinch_state}") - print(f"right_hand_pinch_value: {tv.right_hand_pinch_value}") - print(f"right_hand_squeeze_state: {tv.right_hand_squeeze_state}") - print(f"right_hand_squeeze_value: {tv.right_hand_squeeze_value}") + logger_mp.info("Hand Tracking Data:") + logger_mp.info(f"left_hand_positions shape: {tv.left_hand_positions.shape}\n{tv.left_hand_positions}\n") + logger_mp.info(f"right_hand_positions shape: {tv.right_hand_positions.shape}\n{tv.right_hand_positions}\n") + logger_mp.info(f"left_hand_orientations shape: {tv.left_hand_orientations.shape}\n{tv.left_hand_orientations}\n") + logger_mp.info(f"right_hand_orientations shape: {tv.right_hand_orientations.shape}\n{tv.right_hand_orientations}\n") + logger_mp.info(f"left_hand_pinch_state: {tv.left_hand_pinch_state}") + logger_mp.info(f"left_hand_pinch_value: {tv.left_hand_pinch_value}") + logger_mp.info(f"left_hand_squeeze_state: {tv.left_hand_squeeze_state}") + logger_mp.info(f"left_hand_squeeze_value: {tv.left_hand_squeeze_value}") + logger_mp.info(f"right_hand_pinch_state: {tv.right_hand_pinch_state}") + logger_mp.info(f"right_hand_pinch_value: {tv.right_hand_pinch_value}") + logger_mp.info(f"right_hand_squeeze_state: {tv.right_hand_squeeze_state}") + logger_mp.info(f"right_hand_squeeze_value: {tv.right_hand_squeeze_value}") else: - print("Controller Data:") - print(f"left_controller_trigger_state: {tv.left_controller_trigger_state}") - print(f"left_controller_trigger_value: {tv.left_controller_trigger_value}") - print(f"left_controller_squeeze_state: {tv.left_controller_squeeze_state}") - print(f"left_controller_squeeze_value: {tv.left_controller_squeeze_value}") - print(f"left_controller_thumbstick_state: {tv.left_controller_thumbstick_state}") - print(f"left_controller_thumbstick_value: {tv.left_controller_thumbstick_value}") - print(f"left_controller_aButton: {tv.left_controller_aButton}") - print(f"left_controller_bButton: {tv.left_controller_bButton}") - 
print(f"right_controller_trigger_state: {tv.right_controller_trigger_state}") - print(f"right_controller_trigger_value: {tv.right_controller_trigger_value}") - print(f"right_controller_squeeze_state: {tv.right_controller_squeeze_state}") - print(f"right_controller_squeeze_value: {tv.right_controller_squeeze_value}") - print(f"right_controller_thumbstick_state: {tv.right_controller_thumbstick_state}") - print(f"right_controller_thumbstick_value: {tv.right_controller_thumbstick_value}") - print(f"right_controller_aButton: {tv.right_controller_aButton}") - print(f"right_controller_bButton: {tv.right_controller_bButton}") - print("=" * 80) + logger_mp.info("Controller Data:") + logger_mp.info(f"left_controller_trigger_state: {tv.left_controller_trigger_state}") + logger_mp.info(f"left_controller_trigger_value: {tv.left_controller_trigger_value}") + logger_mp.info(f"left_controller_squeeze_state: {tv.left_controller_squeeze_state}") + logger_mp.info(f"left_controller_squeeze_value: {tv.left_controller_squeeze_value}") + logger_mp.info(f"left_controller_thumbstick_state: {tv.left_controller_thumbstick_state}") + logger_mp.info(f"left_controller_thumbstick_value: {tv.left_controller_thumbstick_value}") + logger_mp.info(f"left_controller_aButton: {tv.left_controller_aButton}") + logger_mp.info(f"left_controller_bButton: {tv.left_controller_bButton}") + logger_mp.info(f"right_controller_trigger_state: {tv.right_controller_trigger_state}") + logger_mp.info(f"right_controller_trigger_value: {tv.right_controller_trigger_value}") + logger_mp.info(f"right_controller_squeeze_state: {tv.right_controller_squeeze_state}") + logger_mp.info(f"right_controller_squeeze_value: {tv.right_controller_squeeze_value}") + logger_mp.info(f"right_controller_thumbstick_state: {tv.right_controller_thumbstick_state}") + logger_mp.info(f"right_controller_thumbstick_value: {tv.right_controller_thumbstick_value}") + logger_mp.info(f"right_controller_aButton: {tv.right_controller_aButton}") + 
logger_mp.info(f"right_controller_bButton: {tv.right_controller_bButton}") + logger_mp.info("=" * 80) time.sleep(0.03) except KeyboardInterrupt: running = False - print("KeyboardInterrupt, exiting program...") + logger_mp.info("KeyboardInterrupt, exiting program...") finally: image_shm.unlink() image_shm.close() - print("Finally, exiting program...") + logger_mp.info("Finally, exiting program...") exit(0) if __name__ == '__main__': diff --git a/teleop/open_television/_test_tv_wrapper.py b/teleop/open_television/_test_tv_wrapper.py index 00f8330..47163b0 100644 --- a/teleop/open_television/_test_tv_wrapper.py +++ b/teleop/open_television/_test_tv_wrapper.py @@ -8,6 +8,9 @@ import numpy as np import time from multiprocessing import shared_memory from open_television import TeleVisionWrapper +import logging_mp +logger_mp = logging_mp.get_logger(__name__, level=logging_mp.INFO) + def run_test_tv_wrapper(): image_shape = (480, 640 * 2, 3) @@ -31,57 +34,57 @@ def run_test_tv_wrapper(): start_time = time.time() teleData = tv_wrapper.get_motion_state_data() - print("=== TeleData Snapshot ===") - print("[Head Rotation Matrix]:\n", teleData.head_rotation) - print("[Left EE Pose]:\n", teleData.left_arm_pose) - print("[Right EE Pose]:\n", teleData.right_arm_pose) + logger_mp.info("=== TeleData Snapshot ===") + logger_mp.info(f"[Head Rotation Matrix]:\n{teleData.head_rotation}") + logger_mp.info(f"[Left EE Pose]:\n{teleData.left_arm_pose}") + logger_mp.info(f"[Right EE Pose]:\n{teleData.right_arm_pose}") if use_hand_track: - print("[Left Hand Position] shape {}:\n{}".format(teleData.left_hand_pos.shape, teleData.left_hand_pos)) - print("[Right Hand Position] shape {}:\n{}".format(teleData.right_hand_pos.shape, teleData.right_hand_pos)) + logger_mp.info("[Left Hand Position] shape {}:\n{}".format(teleData.left_hand_pos.shape, teleData.left_hand_pos)) + logger_mp.info("[Right Hand Position] shape {}:\n{}".format(teleData.right_hand_pos.shape, teleData.right_hand_pos)) if
teleData.left_hand_rot is not None: - print("[Left Hand Rotation] shape {}:\n{}".format(teleData.left_hand_rot.shape, teleData.left_hand_rot)) + logger_mp.info("[Left Hand Rotation] shape {}:\n{}".format(teleData.left_hand_rot.shape, teleData.left_hand_rot)) if teleData.right_hand_rot is not None: - print("[Right Hand Rotation] shape {}:\n{}".format(teleData.right_hand_rot.shape, teleData.right_hand_rot)) + logger_mp.info("[Right Hand Rotation] shape {}:\n{}".format(teleData.right_hand_rot.shape, teleData.right_hand_rot)) if teleData.left_pinch_value is not None: - print("[Left Pinch Value]: {:.2f}".format(teleData.left_pinch_value)) + logger_mp.info("[Left Pinch Value]: {:.2f}".format(teleData.left_pinch_value)) if teleData.right_pinch_value is not None: - print("[Right Pinch Value]: {:.2f}".format(teleData.right_pinch_value)) + logger_mp.info("[Right Pinch Value]: {:.2f}".format(teleData.right_pinch_value)) if teleData.tele_state: state = teleData.tele_state - print("[Hand State]:") - print(f" Left Pinch state: {state.left_pinch_state}") - print(f" Left Squeeze: {state.left_squeeze_state} ({state.left_squeeze_value:.2f})") - print(f" Right Pinch state: {state.right_pinch_state}") - print(f" Right Squeeze: {state.right_squeeze_state} ({state.right_squeeze_value:.2f})") + logger_mp.info("[Hand State]:") + logger_mp.info(f" Left Pinch state: {state.left_pinch_state}") + logger_mp.info(f" Left Squeeze: {state.left_squeeze_state} ({state.left_squeeze_value:.2f})") + logger_mp.info(f" Right Pinch state: {state.right_pinch_state}") + logger_mp.info(f" Right Squeeze: {state.right_squeeze_state} ({state.right_squeeze_value:.2f})") else: - print(f"[Left Trigger Value]: {teleData.left_trigger_value:.2f}") - print(f"[Right Trigger Value]: {teleData.right_trigger_value:.2f}") + logger_mp.info(f"[Left Trigger Value]: {teleData.left_trigger_value:.2f}") + logger_mp.info(f"[Right Trigger Value]: {teleData.right_trigger_value:.2f}") if teleData.tele_state: state = 
teleData.tele_state - print("[Controller State]:") - print(f" Left Trigger: {state.left_trigger_state}") - print(f" Left Squeeze: {state.left_squeeze_ctrl_state} ({state.left_squeeze_ctrl_value:.2f})") - print(f" Left Thumbstick: {state.left_thumbstick_state} ({state.left_thumbstick_value})") - print(f" Left A/B Buttons: A={state.left_aButton}, B={state.left_bButton}") - print(f" Right Trigger: {state.right_trigger_state}") - print(f" Right Squeeze: {state.right_squeeze_ctrl_state} ({state.right_squeeze_ctrl_value:.2f})") - print(f" Right Thumbstick: {state.right_thumbstick_state} ({state.right_thumbstick_value})") - print(f" Right A/B Buttons: A={state.right_aButton}, B={state.right_bButton}") + logger_mp.info("[Controller State]:") + logger_mp.info(f" Left Trigger: {state.left_trigger_state}") + logger_mp.info(f" Left Squeeze: {state.left_squeeze_ctrl_state} ({state.left_squeeze_ctrl_value:.2f})") + logger_mp.info(f" Left Thumbstick: {state.left_thumbstick_state} ({state.left_thumbstick_value})") + logger_mp.info(f" Left A/B Buttons: A={state.left_aButton}, B={state.left_bButton}") + logger_mp.info(f" Right Trigger: {state.right_trigger_state}") + logger_mp.info(f" Right Squeeze: {state.right_squeeze_ctrl_state} ({state.right_squeeze_ctrl_value:.2f})") + logger_mp.info(f" Right Thumbstick: {state.right_thumbstick_state} ({state.right_thumbstick_value})") + logger_mp.info(f" Right A/B Buttons: A={state.right_aButton}, B={state.right_bButton}") current_time = time.time() time_elapsed = current_time - start_time sleep_time = max(0, 0.033 - time_elapsed) time.sleep(sleep_time) - # print(f"main process sleep: {sleep_time}") + logger_mp.debug(f"main process sleep: {sleep_time}") except KeyboardInterrupt: running = False - print("KeyboardInterrupt, exiting program...") + logger_mp.info("KeyboardInterrupt, exiting program...") finally: image_shm.unlink() image_shm.close() - print("Finally, exiting program...") + logger_mp.info("Finally, exiting program...") exit(0) if 
__name__ == '__main__': diff --git a/teleop/robot_control/hand_retargeting.py b/teleop/robot_control/hand_retargeting.py index a0975c5..77c6a18 100644 --- a/teleop/robot_control/hand_retargeting.py +++ b/teleop/robot_control/hand_retargeting.py @@ -2,6 +2,8 @@ from .dex_retargeting.retargeting_config import RetargetingConfig from pathlib import Path import yaml from enum import Enum +import logging_mp +logger_mp = logging_mp.get_logger(__name__) class HandType(Enum): INSPIRE_HAND = "../assets/inspire_hand/inspire_hand.yml" @@ -49,11 +51,11 @@ class HandRetargeting: self.right_dex_retargeting_to_hardware = [ self.right_retargeting_joint_names.index(name) for name in self.right_dex3_api_joint_names] # Archive: This is the joint order of the dex-retargeting library version 0.1.1. - # print([joint.get_name() for joint in self.left_retargeting.optimizer.robot.get_active_joints()]) + # logger_mp.info([joint.get_name() for joint in self.left_retargeting.optimizer.robot.get_active_joints()]) # ['left_hand_thumb_0_joint', 'left_hand_thumb_1_joint', 'left_hand_thumb_2_joint', # 'left_hand_middle_0_joint', 'left_hand_middle_1_joint', # 'left_hand_index_0_joint', 'left_hand_index_1_joint'] - # print([joint.get_name() for joint in self.right_retargeting.optimizer.robot.get_active_joints()]) + # logger_mp.info([joint.get_name() for joint in self.right_retargeting.optimizer.robot.get_active_joints()]) # ['right_hand_thumb_0_joint', 'right_hand_thumb_1_joint', 'right_hand_thumb_2_joint', # 'right_hand_middle_0_joint', 'right_hand_middle_1_joint', # 'right_hand_index_0_joint', 'right_hand_index_1_joint'] @@ -66,11 +68,11 @@ class HandRetargeting: self.right_dex_retargeting_to_hardware = [ self.right_retargeting_joint_names.index(name) for name in self.right_inspire_api_joint_names] except FileNotFoundError: - print(f"Configuration file not found: {config_file_path}") + logger_mp.warning(f"Configuration file not found: {config_file_path}") raise except yaml.YAMLError as e: - 
print(f"YAML error while reading {config_file_path}: {e}") + logger_mp.warning(f"YAML error while reading {config_file_path}: {e}") raise except Exception as e: - print(f"An error occurred: {e}") + logger_mp.error(f"An error occurred: {e}") raise \ No newline at end of file diff --git a/teleop/robot_control/robot_arm.py b/teleop/robot_control/robot_arm.py index d728024..1b1a753 100644 --- a/teleop/robot_control/robot_arm.py +++ b/teleop/robot_control/robot_arm.py @@ -11,6 +11,9 @@ from unitree_sdk2py.utils.crc import CRC from unitree_sdk2py.idl.unitree_go.msg.dds_ import ( LowCmd_ as go_LowCmd, LowState_ as go_LowState) # idl for h1 from unitree_sdk2py.idl.default import unitree_go_msg_dds__LowCmd_ +import logging_mp +logger_mp = logging_mp.get_logger(__name__) + kTopicLowCommand_Debug = "rt/lowcmd" kTopicLowCommand_Motion = "rt/arm_sdk" kTopicLowState = "rt/lowstate" @@ -57,7 +60,7 @@ class DataBuffer: class G1_29_ArmController: def __init__(self, debug_mode = True): - print("Initialize G1_29_ArmController...") + logger_mp.info("Initialize G1_29_ArmController...") self.q_target = np.zeros(14) self.tauff_target = np.zeros(14) self.debug_mode = debug_mode @@ -95,7 +98,7 @@ class G1_29_ArmController: while not self.lowstate_buffer.GetData(): time.sleep(0.01) - print("[G1_29_ArmController] Waiting to subscribe dds...") + logger_mp.warning("[G1_29_ArmController] Waiting to subscribe dds...") # initialize hg's lowcmd msg self.crc = CRC() @@ -104,9 +107,9 @@ class G1_29_ArmController: self.msg.mode_machine = self.get_mode_machine() self.all_motor_q = self.get_current_motor_q() - print(f"Current all body motor state q:\n{self.all_motor_q} \n") - print(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") - print("Lock all joints except two arms...\n") + logger_mp.info(f"Current all body motor state q:\n{self.all_motor_q} \n") + logger_mp.info(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") + logger_mp.info("Lock all joints except 
two arms...\n") arm_indices = set(member.value for member in G1_29_JointArmIndex) for id in G1_29_JointIndex: @@ -126,7 +129,7 @@ class G1_29_ArmController: self.msg.motor_cmd[id].kp = self.kp_high self.msg.motor_cmd[id].kd = self.kd_high self.msg.motor_cmd[id].q = self.all_motor_q[id] - print("Lock OK!\n") + logger_mp.info("Lock OK!\n") # initialize publish thread self.publish_thread = threading.Thread(target=self._ctrl_motor_state) @@ -134,7 +137,7 @@ class G1_29_ArmController: self.publish_thread.daemon = True self.publish_thread.start() - print("Initialize G1_29_ArmController OK!\n") + logger_mp.info("Initialize G1_29_ArmController OK!\n") def _subscribe_motor_state(self): while True: @@ -183,8 +186,8 @@ class G1_29_ArmController: all_t_elapsed = current_time - start_time sleep_time = max(0, (self.control_dt - all_t_elapsed)) time.sleep(sleep_time) - # print(f"arm_velocity_limit:{self.arm_velocity_limit}") - # print(f"sleep_time:{sleep_time}") + # logger_mp.debug(f"arm_velocity_limit:{self.arm_velocity_limit}") + # logger_mp.debug(f"sleep_time:{sleep_time}") def ctrl_dual_arm(self, q_target, tauff_target): '''Set control target values q & tau of the left and right arm motors.''' @@ -210,7 +213,7 @@ class G1_29_ArmController: def ctrl_dual_arm_go_home(self): '''Move both the left and right arms of the robot to their home position by setting the target joint angles (q) and torques (tau) to zero.''' - print("[G1_29_ArmController] ctrl_dual_arm_go_home start...") + logger_mp.info("[G1_29_ArmController] ctrl_dual_arm_go_home start...") with self.ctrl_lock: self.q_target = np.zeros(14) # self.tauff_target = np.zeros(14) @@ -222,7 +225,7 @@ class G1_29_ArmController: for weight in np.arange(1, 0, -0.01): self.msg.motor_cmd[G1_29_JointIndex.kNotUsedJoint0].q = weight; time.sleep(0.02) - print("[G1_29_ArmController] both arms have reached the home position.") + logger_mp.info("[G1_29_ArmController] both arms have reached the home position.") break time.sleep(0.05) @@ 
-332,7 +335,7 @@ class G1_29_JointIndex(IntEnum): class G1_23_ArmController: def __init__(self): - print("Initialize G1_23_ArmController...") + logger_mp.info("Initialize G1_23_ArmController...") self.q_target = np.zeros(10) self.tauff_target = np.zeros(10) @@ -366,7 +369,7 @@ class G1_23_ArmController: while not self.lowstate_buffer.GetData(): time.sleep(0.01) - print("[G1_23_ArmController] Waiting to subscribe dds...") + logger_mp.warning("[G1_23_ArmController] Waiting to subscribe dds...") # initialize hg's lowcmd msg self.crc = CRC() @@ -375,9 +378,9 @@ class G1_23_ArmController: self.msg.mode_machine = self.get_mode_machine() self.all_motor_q = self.get_current_motor_q() - print(f"Current all body motor state q:\n{self.all_motor_q} \n") - print(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") - print("Lock all joints except two arms...\n") + logger_mp.info(f"Current all body motor state q:\n{self.all_motor_q} \n") + logger_mp.info(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") + logger_mp.info("Lock all joints except two arms...\n") arm_indices = set(member.value for member in G1_23_JointArmIndex) for id in G1_23_JointIndex: @@ -397,7 +400,7 @@ class G1_23_ArmController: self.msg.motor_cmd[id].kp = self.kp_high self.msg.motor_cmd[id].kd = self.kd_high self.msg.motor_cmd[id].q = self.all_motor_q[id] - print("Lock OK!\n") + logger_mp.info("Lock OK!\n") # initialize publish thread self.publish_thread = threading.Thread(target=self._ctrl_motor_state) @@ -405,7 +408,7 @@ class G1_23_ArmController: self.publish_thread.daemon = True self.publish_thread.start() - print("Initialize G1_23_ArmController OK!\n") + logger_mp.info("Initialize G1_23_ArmController OK!\n") def _subscribe_motor_state(self): while True: @@ -451,8 +454,8 @@ class G1_23_ArmController: all_t_elapsed = current_time - start_time sleep_time = max(0, (self.control_dt - all_t_elapsed)) time.sleep(sleep_time) - # 
print(f"arm_velocity_limit:{self.arm_velocity_limit}") - # print(f"sleep_time:{sleep_time}") + # logger_mp.debug(f"arm_velocity_limit:{self.arm_velocity_limit}") + # logger_mp.debug(f"sleep_time:{sleep_time}") def ctrl_dual_arm(self, q_target, tauff_target): '''Set control target values q & tau of the left and right arm motors.''' @@ -478,7 +481,7 @@ class G1_23_ArmController: def ctrl_dual_arm_go_home(self): '''Move both the left and right arms of the robot to their home position by setting the target joint angles (q) and torques (tau) to zero.''' - print("[G1_23_ArmController] ctrl_dual_arm_go_home start...") + logger_mp.info("[G1_23_ArmController] ctrl_dual_arm_go_home start...") with self.ctrl_lock: self.q_target = np.zeros(10) # self.tauff_target = np.zeros(10) @@ -486,7 +489,7 @@ class G1_23_ArmController: while True: current_q = self.get_current_dual_arm_q() if np.all(np.abs(current_q) < tolerance): - print("[G1_23_ArmController] both arms have reached the home position.") + logger_mp.info("[G1_23_ArmController] both arms have reached the home position.") break time.sleep(0.05) @@ -588,7 +591,7 @@ class G1_23_JointIndex(IntEnum): class H1_2_ArmController: def __init__(self): - print("Initialize H1_2_ArmController...") + logger_mp.info("Initialize H1_2_ArmController...") self.q_target = np.zeros(14) self.tauff_target = np.zeros(14) @@ -622,7 +625,7 @@ class H1_2_ArmController: while not self.lowstate_buffer.GetData(): time.sleep(0.01) - print("[H1_2_ArmController] Waiting to subscribe dds...") + logger_mp.warning("[H1_2_ArmController] Waiting to subscribe dds...") # initialize hg's lowcmd msg self.crc = CRC() @@ -631,9 +634,9 @@ class H1_2_ArmController: self.msg.mode_machine = self.get_mode_machine() self.all_motor_q = self.get_current_motor_q() - print(f"Current all body motor state q:\n{self.all_motor_q} \n") - print(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") - print("Lock all joints except two arms...\n") + 
logger_mp.info(f"Current all body motor state q:\n{self.all_motor_q} \n") + logger_mp.info(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") + logger_mp.info("Lock all joints except two arms...\n") arm_indices = set(member.value for member in H1_2_JointArmIndex) for id in H1_2_JointIndex: @@ -653,7 +656,7 @@ class H1_2_ArmController: self.msg.motor_cmd[id].kp = self.kp_high self.msg.motor_cmd[id].kd = self.kd_high self.msg.motor_cmd[id].q = self.all_motor_q[id] - print("Lock OK!\n") + logger_mp.info("Lock OK!\n") # initialize publish thread self.publish_thread = threading.Thread(target=self._ctrl_motor_state) @@ -661,7 +664,7 @@ class H1_2_ArmController: self.publish_thread.daemon = True self.publish_thread.start() - print("Initialize H1_2_ArmController OK!\n") + logger_mp.info("Initialize H1_2_ArmController OK!\n") def _subscribe_motor_state(self): while True: @@ -707,8 +710,8 @@ class H1_2_ArmController: all_t_elapsed = current_time - start_time sleep_time = max(0, (self.control_dt - all_t_elapsed)) time.sleep(sleep_time) - # print(f"arm_velocity_limit:{self.arm_velocity_limit}") - # print(f"sleep_time:{sleep_time}") + # logger_mp.debug(f"arm_velocity_limit:{self.arm_velocity_limit}") + # logger_mp.debug(f"sleep_time:{sleep_time}") def ctrl_dual_arm(self, q_target, tauff_target): '''Set control target values q & tau of the left and right arm motors.''' @@ -734,7 +737,7 @@ class H1_2_ArmController: def ctrl_dual_arm_go_home(self): '''Move both the left and right arms of the robot to their home position by setting the target joint angles (q) and torques (tau) to zero.''' - print("[H1_2_ArmController] ctrl_dual_arm_go_home start...") + logger_mp.info("[H1_2_ArmController] ctrl_dual_arm_go_home start...") with self.ctrl_lock: self.q_target = np.zeros(14) # self.tauff_target = np.zeros(14) @@ -742,7 +745,7 @@ class H1_2_ArmController: while True: current_q = self.get_current_dual_arm_q() if np.all(np.abs(current_q) < tolerance): - 
print("[H1_2_ArmController] both arms have reached the home position.") + logger_mp.info("[H1_2_ArmController] both arms have reached the home position.") break time.sleep(0.05) @@ -851,7 +854,7 @@ class H1_2_JointIndex(IntEnum): class H1_ArmController: def __init__(self): - print("Initialize H1_ArmController...") + logger_mp.info("Initialize H1_ArmController...") self.q_target = np.zeros(8) self.tauff_target = np.zeros(8) @@ -883,7 +886,7 @@ class H1_ArmController: while not self.lowstate_buffer.GetData(): time.sleep(0.01) - print("[H1_ArmController] Waiting to subscribe dds...") + logger_mp.warning("[H1_ArmController] Waiting to subscribe dds...") # initialize h1's lowcmd msg self.crc = CRC() @@ -894,9 +897,9 @@ class H1_ArmController: self.msg.gpio = 0 self.all_motor_q = self.get_current_motor_q() - print(f"Current all body motor state q:\n{self.all_motor_q} \n") - print(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") - print("Lock all joints except two arms...\n") + logger_mp.info(f"Current all body motor state q:\n{self.all_motor_q} \n") + logger_mp.info(f"Current two arms motor state q:\n{self.get_current_dual_arm_q()}\n") + logger_mp.info("Lock all joints except two arms...\n") for id in H1_JointIndex: if self._Is_weak_motor(id): @@ -908,7 +911,7 @@ class H1_ArmController: self.msg.motor_cmd[id].kd = self.kd_high self.msg.motor_cmd[id].mode = 0x0A self.msg.motor_cmd[id].q = self.all_motor_q[id] - print("Lock OK!\n") + logger_mp.info("Lock OK!\n") # initialize publish thread self.publish_thread = threading.Thread(target=self._ctrl_motor_state) @@ -916,7 +919,7 @@ class H1_ArmController: self.publish_thread.daemon = True self.publish_thread.start() - print("Initialize H1_ArmController OK!\n") + logger_mp.info("Initialize H1_ArmController OK!\n") def _subscribe_motor_state(self): while True: @@ -962,8 +965,8 @@ class H1_ArmController: all_t_elapsed = current_time - start_time sleep_time = max(0, (self.control_dt - all_t_elapsed)) 
time.sleep(sleep_time) - # print(f"arm_velocity_limit:{self.arm_velocity_limit}") - # print(f"sleep_time:{sleep_time}") + # logger_mp.debug(f"arm_velocity_limit:{self.arm_velocity_limit}") + # logger_mp.debug(f"sleep_time:{sleep_time}") def ctrl_dual_arm(self, q_target, tauff_target): '''Set control target values q & tau of the left and right arm motors.''' @@ -985,7 +988,7 @@ class H1_ArmController: def ctrl_dual_arm_go_home(self): '''Move both the left and right arms of the robot to their home position by setting the target joint angles (q) and torques (tau) to zero.''' - print("[H1_ArmController] ctrl_dual_arm_go_home start...") + logger_mp.info("[H1_ArmController] ctrl_dual_arm_go_home start...") with self.ctrl_lock: self.q_target = np.zeros(8) # self.tauff_target = np.zeros(8) @@ -993,7 +996,7 @@ class H1_ArmController: while True: current_q = self.get_current_dual_arm_q() if np.all(np.abs(current_q) < tolerance): - print("[H1_ArmController] both arms have reached the home position.") + logger_mp.info("[H1_ArmController] both arms have reached the home position.") break time.sleep(0.05) diff --git a/teleop/robot_control/robot_arm_ik.py b/teleop/robot_control/robot_arm_ik.py index 57c92e0..0a94d11 100644 --- a/teleop/robot_control/robot_arm_ik.py +++ b/teleop/robot_control/robot_arm_ik.py @@ -7,7 +7,8 @@ from pinocchio import casadi as cpin from pinocchio.visualize import MeshcatVisualizer import os import sys - +import logging_mp +logger_mp = logging_mp.get_logger(__name__) parent2_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) sys.path.append(parent2_dir) @@ -83,9 +84,9 @@ class G1_29_ArmIK: # for i in range(self.reduced_robot.model.nframes): # frame = self.reduced_robot.model.frames[i] # frame_id = self.reduced_robot.model.getFrameId(frame.name) - # print(f"Frame ID: {frame_id}, Name: {frame.name}") + # logger_mp.debug(f"Frame ID: {frame_id}, Name: {frame.name}") # for idx, name in 
enumerate(self.reduced_robot.model.names): - # print(f"{idx}: {name}") + # logger_mp.debug(f"{idx}: {name}") # Creating Casadi models and data for symbolic computing self.cmodel = cpin.Model(self.reduced_robot.model) @@ -237,7 +238,7 @@ class G1_29_ArmIK: return sol_q, sol_tauff except Exception as e: - print(f"ERROR in convergence, plotting debug info.{e}") + logger_mp.error(f"ERROR in convergence, plotting debug info.{e}") sol_q = self.opti.debug.value(self.var_q) self.smooth_filter.add_data(sol_q) @@ -252,7 +253,7 @@ class G1_29_ArmIK: sol_tauff = pin.rnea(self.reduced_robot.model, self.reduced_robot.data, sol_q, v, np.zeros(self.reduced_robot.model.nv)) - print(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") + logger_mp.error(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") if self.Visualization: self.vis.display(sol_q) # for visualization @@ -311,7 +312,7 @@ class G1_23_ArmIK: # for i in range(self.reduced_robot.model.nframes): # frame = self.reduced_robot.model.frames[i] # frame_id = self.reduced_robot.model.getFrameId(frame.name) - # print(f"Frame ID: {frame_id}, Name: {frame.name}") + # logger_mp.debug(f"Frame ID: {frame_id}, Name: {frame.name}") # Creating Casadi models and data for symbolic computing self.cmodel = cpin.Model(self.reduced_robot.model) @@ -463,7 +464,7 @@ class G1_23_ArmIK: return sol_q, sol_tauff except Exception as e: - print(f"ERROR in convergence, plotting debug info.{e}") + logger_mp.error(f"ERROR in convergence, plotting debug info.{e}") sol_q = self.opti.debug.value(self.var_q) self.smooth_filter.add_data(sol_q) @@ -478,7 +479,7 @@ class G1_23_ArmIK: sol_tauff = pin.rnea(self.reduced_robot.model, self.reduced_robot.data, sol_q, v, np.zeros(self.reduced_robot.model.nv)) - print(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") + 
logger_mp.error(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") if self.Visualization: self.vis.display(sol_q) # for visualization @@ -562,7 +563,7 @@ class H1_2_ArmIK: # for i in range(self.reduced_robot.model.nframes): # frame = self.reduced_robot.model.frames[i] # frame_id = self.reduced_robot.model.getFrameId(frame.name) - # print(f"Frame ID: {frame_id}, Name: {frame.name}") + # logger_mp.debug(f"Frame ID: {frame_id}, Name: {frame.name}") # Creating Casadi models and data for symbolic computing self.cmodel = cpin.Model(self.reduced_robot.model) @@ -714,7 +715,7 @@ class H1_2_ArmIK: return sol_q, sol_tauff except Exception as e: - print(f"ERROR in convergence, plotting debug info.{e}") + logger_mp.error(f"ERROR in convergence, plotting debug info.{e}") sol_q = self.opti.debug.value(self.var_q) self.smooth_filter.add_data(sol_q) @@ -729,7 +730,7 @@ class H1_2_ArmIK: sol_tauff = pin.rnea(self.reduced_robot.model, self.reduced_robot.data, sol_q, v, np.zeros(self.reduced_robot.model.nv)) - print(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") + logger_mp.error(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") if self.Visualization: self.vis.display(sol_q) # for visualization @@ -816,7 +817,7 @@ class H1_ArmIK: # for i in range(self.reduced_robot.model.nframes): # frame = self.reduced_robot.model.frames[i] # frame_id = self.reduced_robot.model.getFrameId(frame.name) - # print(f"Frame ID: {frame_id}, Name: {frame.name}") + # logger_mp.debug(f"Frame ID: {frame_id}, Name: {frame.name}") # Creating Casadi models and data for symbolic computing self.cmodel = cpin.Model(self.reduced_robot.model) @@ -968,7 +969,7 @@ class H1_ArmIK: return sol_q, sol_tauff except Exception as e: - print(f"ERROR in convergence, plotting debug info.{e}") + logger_mp.error(f"ERROR in 
convergence, plotting debug info.{e}") sol_q = self.opti.debug.value(self.var_q) self.smooth_filter.add_data(sol_q) @@ -983,7 +984,7 @@ class H1_ArmIK: sol_tauff = pin.rnea(self.reduced_robot.model, self.reduced_robot.data, sol_q, v, np.zeros(self.reduced_robot.model.nv)) - print(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") + logger_mp.error(f"sol_q:{sol_q} \nmotorstate: \n{current_lr_arm_motor_q} \nleft_pose: \n{left_wrist} \nright_pose: \n{right_wrist}") if self.Visualization: self.vis.display(sol_q) # for visualization diff --git a/teleop/robot_control/robot_hand_inspire.py b/teleop/robot_control/robot_hand_inspire.py index 5c69477..562402e 100644 --- a/teleop/robot_control/robot_hand_inspire.py +++ b/teleop/robot_control/robot_hand_inspire.py @@ -8,7 +8,10 @@ import numpy as np from enum import IntEnum import threading import time -from multiprocessing import Process, shared_memory, Array, Lock +from multiprocessing import Process, Array + +import logging_mp +logger_mp = logging_mp.get_logger(__name__) inspire_tip_indices = [4, 9, 14, 19, 24] Inspire_Num_Motors = 6 @@ -18,7 +21,7 @@ kTopicInspireState = "rt/inspire/state" class Inspire_Controller: def __init__(self, left_hand_array, right_hand_array, dual_hand_data_lock = None, dual_hand_state_array = None, dual_hand_action_array = None, fps = 100.0, Unit_Test = False): - print("Initialize Inspire_Controller...") + logger_mp.info("Initialize Inspire_Controller...") self.fps = fps self.Unit_Test = Unit_Test if not self.Unit_Test: @@ -47,14 +50,14 @@ class Inspire_Controller: if any(self.right_hand_state_array): # any(self.left_hand_state_array) and break time.sleep(0.01) - print("[Inspire_Controller] Waiting to subscribe dds...") + logger_mp.warning("[Inspire_Controller] Waiting to subscribe dds...") hand_control_process = Process(target=self.control_process, args=(left_hand_array, right_hand_array, self.left_hand_state_array, 
self.right_hand_state_array, dual_hand_data_lock, dual_hand_state_array, dual_hand_action_array)) hand_control_process.daemon = True hand_control_process.start() - print("Initialize Inspire_Controller OK!\n") + logger_mp.info("Initialize Inspire_Controller OK!\n") def _subscribe_hand_state(self): while True: @@ -76,7 +79,7 @@ class Inspire_Controller: self.hand_msg.cmds[id].q = right_q_target[idx] self.HandCmb_publisher.Write(self.hand_msg) - # print("hand ctrl publish ok.") + # logger_mp.debug("hand ctrl publish ok.") def control_process(self, left_hand_array, right_hand_array, left_hand_state_array, right_hand_state_array, dual_hand_data_lock = None, dual_hand_state_array = None, dual_hand_action_array = None): @@ -147,7 +150,7 @@ class Inspire_Controller: sleep_time = max(0, (1 / self.fps) - time_elapsed) time.sleep(sleep_time) finally: - print("Inspire_Controller has been closed.") + logger_mp.info("Inspire_Controller has been closed.") # Update hand state, according to the official documentation, https://support.unitree.com/home/en/G1_developer/inspire_dfx_dexterous_hand # the state sequence is as shown in the table below diff --git a/teleop/robot_control/robot_hand_unitree.py b/teleop/robot_control/robot_hand_unitree.py index 4787a59..dbd6935 100644 --- a/teleop/robot_control/robot_hand_unitree.py +++ b/teleop/robot_control/robot_hand_unitree.py @@ -20,6 +20,9 @@ sys.path.append(parent2_dir) from teleop.robot_control.hand_retargeting import HandRetargeting, HandType from teleop.utils.weighted_moving_filter import WeightedMovingFilter +import logging_mp +logger_mp = logging_mp.get_logger(__name__) + unitree_tip_indices = [4, 9, 14] # [thumb, index, middle] in OpenXR Dex3_Num_Motors = 7 @@ -49,7 +52,7 @@ class Dex3_1_Controller: Unit_Test: Whether to enable unit testing """ - print("Initialize Dex3_1_Controller...") + logger_mp.info("Initialize Dex3_1_Controller...") self.fps = fps self.Unit_Test = Unit_Test @@ -83,14 +86,14 @@ class Dex3_1_Controller: if 
any(self.left_hand_state_array) and any(self.right_hand_state_array): break time.sleep(0.01) - print("[Dex3_1_Controller] Waiting to subscribe dds...") + logger_mp.warning("[Dex3_1_Controller] Waiting to subscribe dds...") hand_control_process = Process(target=self.control_process, args=(left_hand_array_in, right_hand_array_in, self.left_hand_state_array, self.right_hand_state_array, dual_hand_data_lock, dual_hand_state_array_out, dual_hand_action_array_out)) hand_control_process.daemon = True hand_control_process.start() - print("Initialize Dex3_1_Controller OK!\n") + logger_mp.info("Initialize Dex3_1_Controller OK!\n") def _subscribe_hand_state(self): while True: @@ -127,7 +130,7 @@ class Dex3_1_Controller: self.LeftHandCmb_publisher.Write(self.left_msg) self.RightHandCmb_publisher.Write(self.right_msg) - # print("hand ctrl publish ok.") + # logger_mp.debug("hand ctrl publish ok.") def control_process(self, left_hand_array_in, right_hand_array_in, left_hand_state_array, right_hand_state_array, dual_hand_data_lock = None, dual_hand_state_array_out = None, dual_hand_action_array_out = None): @@ -204,7 +207,7 @@ class Dex3_1_Controller: sleep_time = max(0, (1 / self.fps) - time_elapsed) time.sleep(sleep_time) finally: - print("Dex3_1_Controller has been closed.") + logger_mp.info("Dex3_1_Controller has been closed.") class Dex3_1_Left_JointIndex(IntEnum): kLeftHandThumb0 = 0 @@ -251,7 +254,7 @@ class Gripper_Controller: Unit_Test: Whether to enable unit testing """ - print("Initialize Gripper_Controller...") + logger_mp.info("Initialize Gripper_Controller...") self.fps = fps self.Unit_Test = Unit_Test @@ -281,14 +284,14 @@ class Gripper_Controller: if any(state != 0.0 for state in self.dual_gripper_state): break time.sleep(0.01) - print("[Gripper_Controller] Waiting to subscribe dds...") + logger_mp.warning("[Gripper_Controller] Waiting to subscribe dds...") self.gripper_control_thread = threading.Thread(target=self.control_thread, args=(left_gripper_value_in, 
right_gripper_value_in, self.dual_gripper_state, dual_gripper_data_lock, dual_gripper_state_out, dual_gripper_action_out)) self.gripper_control_thread.daemon = True self.gripper_control_thread.start() - print("Initialize Gripper_Controller OK!\n") + logger_mp.info("Initialize Gripper_Controller OK!\n") def _subscribe_gripper_state(self): while True: @@ -304,7 +307,7 @@ class Gripper_Controller: self.gripper_msg.cmds[id].q = gripper_q_target[idx] self.GripperCmb_publisher.Write(self.gripper_msg) - # print("gripper ctrl publish ok.") + # logger_mp.debug("gripper ctrl publish ok.") def control_thread(self, left_gripper_value_in, right_gripper_value_in, dual_gripper_state_in, dual_hand_data_lock = None, dual_gripper_state_out = None, dual_gripper_action_out = None): @@ -374,9 +377,9 @@ class Gripper_Controller: dual_gripper_state_out[:] = dual_gripper_state - np.array([RIGHT_MAPPED_MIN, LEFT_MAPPED_MIN]) dual_gripper_action_out[:] = dual_gripper_action - np.array([RIGHT_MAPPED_MIN, LEFT_MAPPED_MIN]) - # print(f"LEFT: euclidean:{left_euclidean_distance:.4f} \tstate:{dual_gripper_state_out[1]:.4f}\ + # logger_mp.debug(f"LEFT: euclidean:{left_euclidean_distance:.4f} \tstate:{dual_gripper_state_out[1]:.4f}\ # \ttarget_action:{right_target_action - RIGHT_MAPPED_MIN:.4f} \tactual_action:{dual_gripper_action_out[1]:.4f}") - # print(f"RIGHT:euclidean:{right_euclidean_distance:.4f} \tstate:{dual_gripper_state_out[0]:.4f}\ + # logger_mp.debug(f"RIGHT:euclidean:{right_euclidean_distance:.4f} \tstate:{dual_gripper_state_out[0]:.4f}\ # \ttarget_action:{left_target_action - LEFT_MAPPED_MIN:.4f} \tactual_action:{dual_gripper_action_out[0]:.4f}") self.ctrl_dual_gripper(dual_gripper_action) @@ -385,7 +388,7 @@ class Gripper_Controller: sleep_time = max(0, (1 / self.fps) - time_elapsed) time.sleep(sleep_time) finally: - print("Gripper_Controller has been closed.") + logger_mp.info("Gripper_Controller has been closed.") class Gripper_JointIndex(IntEnum): kLeftGripper = 0 @@ -402,7 +405,7 
@@ if __name__ == "__main__": parser.add_argument('--gripper', dest='dex', action='store_false', help='Use gripper') parser.set_defaults(dex=True) args = parser.parse_args() - print(f"args:{args}\n") + logger_mp.info(f"args:{args}\n") # image img_config = { @@ -457,5 +460,5 @@ if __name__ == "__main__": right_hand_array[:] = right_hand.flatten() # with dual_hand_data_lock: - # print(f"state : {list(dual_hand_state_array)} \naction: {list(dual_hand_action_array)} \n") + # logger_mp.info(f"state : {list(dual_hand_state_array)} \naction: {list(dual_hand_action_array)} \n") time.sleep(0.01) diff --git a/teleop/teleop_hand_and_arm.py b/teleop/teleop_hand_and_arm.py index 7a1b353..6426b9c 100644 --- a/teleop/teleop_hand_and_arm.py +++ b/teleop/teleop_hand_and_arm.py @@ -4,6 +4,9 @@ import argparse import cv2 from multiprocessing import shared_memory, Value, Array, Lock import threading +import logging_mp +logging_mp.basic_config(level=logging_mp.INFO) +logger_mp = logging_mp.get_logger(__name__) import os import sys @@ -38,7 +41,7 @@ if __name__ == '__main__': parser.set_defaults(debug = True) args = parser.parse_args() - print(f"args:{args}\n") + logger_mp.info(f"args: {args}") # image client: img_config should be the same as the configuration in image_server.py (of Robot's development computing unit) img_config = { @@ -87,8 +90,9 @@ if __name__ == '__main__': # arm if args.arm == 'G1_29': - arm_ctrl = G1_29_ArmController(debug_mode=args.debug) - arm_ik = G1_29_ArmIK() + # arm_ctrl = G1_29_ArmController(debug_mode=args.debug) + # arm_ik = G1_29_ArmIK() + pass elif args.arm == 'G1_23': arm_ctrl = G1_23_ArmController() arm_ik = G1_23_ArmIK() @@ -138,7 +142,7 @@ if __name__ == '__main__': try: user_input = input("Please enter the start signal (enter 'r' to start the subsequent program):\n") if user_input.lower() == 'r': - arm_ctrl.speed_gradual_max() + # arm_ctrl.speed_gradual_max() running = True while running: start_time = time.time() @@ -190,16 +194,16 @@ if __name__ == 
'__main__': -tele_data.tele_state.left_thumbstick_value[0] * 0.3, -tele_data.tele_state.right_thumbstick_value[0] * 0.3) - # get current robot state data. - current_lr_arm_q = arm_ctrl.get_current_dual_arm_q() - current_lr_arm_dq = arm_ctrl.get_current_dual_arm_dq() + # # get current robot state data. + # current_lr_arm_q = arm_ctrl.get_current_dual_arm_q() + # current_lr_arm_dq = arm_ctrl.get_current_dual_arm_dq() - # solve ik using motor data and wrist pose, then use ik results to control arms. - time_ik_start = time.time() - sol_q, sol_tauff = arm_ik.solve_ik(tele_data.left_arm_pose, tele_data.right_arm_pose, current_lr_arm_q, current_lr_arm_dq) - time_ik_end = time.time() - # print(f"ik:\t{round(time_ik_end - time_ik_start, 6)}") - arm_ctrl.ctrl_dual_arm(sol_q, sol_tauff) + # # solve ik using motor data and wrist pose, then use ik results to control arms. + # time_ik_start = time.time() + # sol_q, sol_tauff = arm_ik.solve_ik(tele_data.left_arm_pose, tele_data.right_arm_pose, current_lr_arm_q, current_lr_arm_dq) + # time_ik_end = time.time() + # logger_mp.debug(f"ik:\t{round(time_ik_end - time_ik_start, 6)}") + # arm_ctrl.ctrl_dual_arm(sol_q, sol_tauff) # record data if args.record: @@ -325,12 +329,12 @@ if __name__ == '__main__': time_elapsed = current_time - start_time sleep_time = max(0, (1 / args.frequency) - time_elapsed) time.sleep(sleep_time) - # print(f"main process sleep: {sleep_time}") + logger_mp.debug(f"main process sleep: {sleep_time}") except KeyboardInterrupt: - print("KeyboardInterrupt, exiting program...") + logger_mp.info("KeyboardInterrupt, exiting program...") finally: - arm_ctrl.ctrl_dual_arm_go_home() + # arm_ctrl.ctrl_dual_arm_go_home() tv_img_shm.unlink() tv_img_shm.close() if WRIST: @@ -338,5 +342,5 @@ if __name__ == '__main__': wrist_img_shm.close() if args.record: recorder.close() - print("Finally, exiting program...") + logger_mp.info("Finally, exiting program...") exit(0) \ No newline at end of file diff --git 
a/teleop/utils/episode_writer.py b/teleop/utils/episode_writer.py index ec05ba1..8f255f6 100644 --- a/teleop/utils/episode_writer.py +++ b/teleop/utils/episode_writer.py @@ -7,22 +7,24 @@ import time from .rerun_visualizer import RerunLogger from queue import Queue, Empty from threading import Thread +import logging_mp +logger_mp = logging_mp.get_logger(__name__) class EpisodeWriter(): def __init__(self, task_dir, frequency=30, image_size=[640, 480], rerun_log = True): """ image_size: [width, height] """ - print("==> EpisodeWriter initializing...\n") + logger_mp.info("==> EpisodeWriter initializing...\n") self.task_dir = task_dir self.frequency = frequency self.image_size = image_size self.rerun_log = rerun_log if self.rerun_log: - print("==> RerunLogger initializing...\n") + logger_mp.info("==> RerunLogger initializing...\n") self.rerun_logger = RerunLogger(prefix="online/", IdxRangeBoundary = 60, memory_limit = "300MB") - print("==> RerunLogger initializing ok.\n") + logger_mp.info("==> RerunLogger initializing ok.\n") self.data = {} self.episode_data = [] @@ -32,10 +34,10 @@ class EpisodeWriter(): episode_dirs = [episode_dir for episode_dir in os.listdir(self.task_dir) if 'episode_' in episode_dir] episode_last = sorted(episode_dirs)[-1] if len(episode_dirs) > 0 else None self.episode_id = 0 if episode_last is None else int(episode_last.split('_')[-1]) - print(f"==> task_dir directory already exist, now self.episode_id is:{self.episode_id}\n") + logger_mp.info(f"==> task_dir directory already exist, now self.episode_id is:{self.episode_id}\n") else: os.makedirs(self.task_dir) - print(f"==> episode directory does not exist, now create one.\n") + logger_mp.info(f"==> episode directory does not exist, now create one.\n") self.data_info() self.text_desc() @@ -47,7 +49,7 @@ class EpisodeWriter(): self.worker_thread = Thread(target=self.process_queue) self.worker_thread.start() - print("==> EpisodeWriter initialized successfully.\n") + logger_mp.info("==> 
EpisodeWriter initialized successfully.\n") def data_info(self, version='1.0.0', date=None, author=None): self.info = { @@ -87,7 +89,7 @@ class EpisodeWriter(): Once successfully created, this function will only be available again after save_episode complete its save task. """ if not self.is_available: - print("==> The class is currently unavailable for new operations. Please wait until ongoing tasks are completed.") + logger_mp.info("==> The class is currently unavailable for new operations. Please wait until ongoing tasks are completed.") return False # Return False if the class is unavailable # Reset episode-related data and create necessary directories @@ -108,7 +110,7 @@ class EpisodeWriter(): self.online_logger = RerunLogger(prefix="online/", IdxRangeBoundary = 60, memory_limit="300MB") self.is_available = False # After the episode is created, the class is marked as unavailable until the episode is successfully saved - print(f"==> New episode created: {self.episode_dir}") + logger_mp.info(f"==> New episode created: {self.episode_dir}") return True # Return True if the episode is successfully created def add_item(self, colors, depths=None, states=None, actions=None, tactiles=None, audios=None): @@ -135,7 +137,7 @@ class EpisodeWriter(): try: self._process_item_data(item_data) except Exception as e: - print(f"Error processing item_data (idx={item_data['idx']}): {e}") + logger_mp.info(f"Error processing item_data (idx={item_data['idx']}): {e}") self.item_data_queue.task_done() except Empty: pass @@ -155,7 +157,7 @@ class EpisodeWriter(): for idx_color, (color_key, color) in enumerate(colors.items()): color_name = f'{str(idx).zfill(6)}_{color_key}.jpg' if not cv2.imwrite(os.path.join(self.color_dir, color_name), color): - print(f"Failed to save color image.") + logger_mp.info(f"Failed to save color image.") item_data['colors'][color_key] = os.path.join('colors', color_name) # Save depths @@ -163,7 +165,7 @@ class EpisodeWriter(): for idx_depth, (depth_key, depth) 
in enumerate(depths.items()): depth_name = f'{str(idx).zfill(6)}_{depth_key}.jpg' if not cv2.imwrite(os.path.join(self.depth_dir, depth_name), depth): - print(f"Failed to save depth image.") + logger_mp.info(f"Failed to save depth image.") item_data['depths'][depth_key] = os.path.join('depths', depth_name) # Save audios @@ -179,7 +181,7 @@ class EpisodeWriter(): # Log data if necessary if self.rerun_log: curent_record_time = time.time() - print(f"==> episode_id:{self.episode_id} item_id:{self.item_id} current_time:{curent_record_time}") + logger_mp.info(f"==> episode_id:{self.episode_id} item_id:{self.item_id} current_time:{curent_record_time}") self.rerun_logger.log_item_data(item_data) def save_episode(self): @@ -187,7 +189,7 @@ class EpisodeWriter(): Trigger the save operation. This sets the save flag, and the process_queue thread will handle it. """ self.need_save = True # Set the save flag - print(f"==> Episode saved start...") + logger_mp.info(f"==> Episode saved start...") def _save_episode(self): """ @@ -200,7 +202,7 @@ class EpisodeWriter(): jsonf.write(json.dumps(self.data, indent=4, ensure_ascii=False)) self.need_save = False # Reset the save flag self.is_available = True # Mark the class as available after saving - print(f"==> Episode saved successfully to {self.json_path}.") + logger_mp.info(f"==> Episode saved successfully to {self.json_path}.") def close(self): """ diff --git a/teleop/utils/rerun_visualizer.py b/teleop/utils/rerun_visualizer.py index f5c8a87..631fd6c 100644 --- a/teleop/utils/rerun_visualizer.py +++ b/teleop/utils/rerun_visualizer.py @@ -188,6 +188,9 @@ if __name__ == "__main__": import gdown import zipfile import os + import logging_mp + logger_mp = logging_mp.get_logger(__name__, level=logging_mp.INFO) + zip_file = "rerun_testdata.zip" zip_file_download_url = "https://drive.google.com/file/d/1f5UuFl1z_gaByg_7jDRj1_NxfJZh2evD/view?usp=sharing" unzip_file_output_dir = "./testdata" @@ -195,16 +198,16 @@ if __name__ == "__main__": if 
not os.path.exists(zip_file): file_id = zip_file_download_url.split('/')[5] gdown.download(id=file_id, output=zip_file, quiet=False) - print("download ok.") + logger_mp.info("download ok.") if not os.path.exists(unzip_file_output_dir): os.makedirs(unzip_file_output_dir) with zipfile.ZipFile(zip_file, 'r') as zip_ref: zip_ref.extractall(unzip_file_output_dir) - print("uncompress ok.") + logger_mp.info("uncompress ok.") os.remove(zip_file) - print("clean file ok.") + logger_mp.info("clean file ok.") else: - print("rerun_testdata exits.") + logger_mp.info("rerun_testdata exits.") episode_reader = RerunEpisodeReader(task_dir = unzip_file_output_dir) @@ -212,20 +215,20 @@ if __name__ == "__main__": user_input = input("Please enter the start signal (enter 'off' or 'on' to start the subsequent program):\n") if user_input.lower() == 'off': episode_data6 = episode_reader.return_episode_data(6) - print("Starting offline visualization...") + logger_mp.info("Starting offline visualization...") offline_logger = RerunLogger(prefix="offline/") offline_logger.log_episode_data(episode_data6) - print("Offline visualization completed.") + logger_mp.info("Offline visualization completed.") # TEST EXAMPLE 2 : ONLINE DATA TEST, SLIDE WINDOW SIZE IS 60, MEMORY LIMIT IS 50MB if user_input.lower() == 'on': episode_data8 = episode_reader.return_episode_data(8) - print("Starting online visualization with fixed idx size...") + logger_mp.info("Starting online visualization with fixed idx size...") online_logger = RerunLogger(prefix="online/", IdxRangeBoundary = 60, memory_limit='50MB') for item_data in episode_data8: online_logger.log_item_data(item_data) time.sleep(0.033) # 30hz - print("Online visualization completed.") + logger_mp.info("Online visualization completed.") # # TEST DATA OF data_dir @@ -236,9 +239,9 @@ if __name__ == "__main__": # episode_data8 = episode_reader2.return_episode_data(episode_data_number) # if user_input.lower() == 'on': # # Example 2: Offline Visualization with 
Fixed Time Window - # print("Starting offline visualization with fixed idx size...") + # logger_mp.info("Starting offline visualization with fixed idx size...") # online_logger = RerunLogger(prefix="offline/", IdxRangeBoundary = 60) # for item_data in episode_data8: # online_logger.log_item_data(item_data) # time.sleep(0.033) # 30hz - # print("Offline visualization completed.") \ No newline at end of file + # logger_mp.info("Offline visualization completed.") \ No newline at end of file