Browse Source

Replace EMA head smoothing with rotation-gated compensation

The EMA approach was mathematically flawed — it amplified stepping
errors by (2-alpha) instead of filtering them. The new approach uses
head angular velocity to distinguish head turns from body stepping:

- Gate opens (compensate) when head rotation > 0.1 rad/s (looking around)
- Gate stays closed (no compensation) when head is still (stepping)
- Head position changes are only accumulated into compensation when
  the gate is open, so tv_wrapper's natural body-movement cancellation
  works correctly during stepping

Also removes --head-smooth-alpha CLI arg (no longer needed).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
main
Joe DiPrima 1 month ago
parent
commit
35af5bca7c
  1. 49
      teleop/teleop_hand_and_arm.py

49
teleop/teleop_hand_and_arm.py

@@ -156,8 +156,6 @@ if __name__ == '__main__':
help='Seconds to ramp tracking after calibration (default: 0.5)')
parser.add_argument('--rot-blend', type=float, default=0.7,
help='Rotation tracking blend (0=position-only, 1=full rotation, default: 0.7)')
parser.add_argument('--head-smooth-alpha', type=float, default=0.02,
help='EMA alpha for head position smoothing (0=frozen, 1=no smoothing, default: 0.02)')
# record mode and task info
parser.add_argument('--record', action = 'store_true', help = 'Enable data recording mode')
parser.add_argument('--task-dir', type = str, default = './utils/data/', help = 'path to save data')
@@ -356,7 +354,9 @@ if __name__ == '__main__':
vp_ref_right = None
robot_ref_left = None # 4x4: Robot FK pose at calibration moment
robot_ref_right = None
head_smooth = None # 3-vec: EMA of head position (for decoupling)
head_rot_prev = None # 3x3: previous frame head rotation (for gating)
head_pos_prev = None # 3-vec: previous frame head position
head_compensation = np.zeros(3) # accumulated rotation-gated head position correction
prev_start = False
# Initial calibration: capture reference poses on first START
@@ -366,13 +366,14 @@ if __name__ == '__main__':
robot_ref_left, robot_ref_right = arm_ik.compute_fk_pose(_cal_arm_q)
vp_ref_left = _cal_tele.left_wrist_pose.copy()
vp_ref_right = _cal_tele.right_wrist_pose.copy()
head_smooth = _cal_tele.head_pose[:3, 3].copy()
head_rot_prev = _cal_tele.head_pose[:3, :3].copy()
head_pos_prev = _cal_tele.head_pose[:3, 3].copy()
head_compensation = np.zeros(3)
calibrated = True
calibration_time = time.time()
prev_start = True
logger_mp.info(f"[calibration] Initial reference captured. "
f"Robot L={robot_ref_left[:3,3]}, R={robot_ref_right[:3,3]} "
f"Head={head_smooth}")
f"Robot L={robot_ref_left[:3,3]}, R={robot_ref_right[:3,3]}")
# main loop. robot start to follow VR user's motion
while not STOP:
@@ -386,6 +387,7 @@ if __name__ == '__main__':
i_error_right = np.zeros(3)
i_rot_error_left = np.zeros(3)
i_rot_error_right = np.zeros(3)
head_compensation = np.zeros(3)
arm_ctrl.ctrl_dual_arm(np.zeros(14), np.zeros(14))
reset_start = time.time()
while time.time() - reset_start < 3.0:
@@ -428,7 +430,9 @@ if __name__ == '__main__':
robot_ref_left, robot_ref_right = arm_ik.compute_fk_pose(_cal_arm_q)
vp_ref_left = _cal_tele.left_wrist_pose.copy()
vp_ref_right = _cal_tele.right_wrist_pose.copy()
head_smooth = _cal_tele.head_pose[:3, 3].copy()
head_rot_prev = _cal_tele.head_pose[:3, :3].copy()
head_pos_prev = _cal_tele.head_pose[:3, 3].copy()
head_compensation = np.zeros(3)
calibrated = True
calibration_time = time.time()
i_error_left = np.zeros(3)
@@ -445,6 +449,7 @@ if __name__ == '__main__':
i_error_right = np.zeros(3)
i_rot_error_left = np.zeros(3)
i_rot_error_right = np.zeros(3)
head_compensation = np.zeros(3)
current_time = time.time()
time_elapsed = current_time - start_time
sleep_time = max(0, (1 / args.frequency) - time_elapsed)
@@ -527,16 +532,26 @@ if __name__ == '__main__':
settle_elapsed = now - calibration_time
settle_alpha = min(1.0, settle_elapsed / max(args.settle_time, 0.01))
# Head decoupling via EMA: tv_wrapper subtracts live head position from wrists.
# We compensate only FAST head movement (looking around), not slow body movement
# (stepping). head_smooth tracks body position; head_fast = head_now - head_smooth.
head_smooth = (head_smooth * (1 - args.head_smooth_alpha)
+ tele_data.head_pose[:3, 3] * args.head_smooth_alpha)
head_fast = tele_data.head_pose[:3, 3] - head_smooth
# Position: robot_ref + settle_alpha * (vp_current + head_fast - vp_ref)
left_delta_pos = (tele_data.left_wrist_pose[:3, 3] + head_fast) - vp_ref_left[:3, 3]
right_delta_pos = (tele_data.right_wrist_pose[:3, 3] + head_fast) - vp_ref_right[:3, 3]
# Head decoupling via rotation gating: only compensate head position
# drift caused by head ROTATION (looking around), not body translation
# (stepping). When head rotates, gate opens and position change is accumulated.
# When head is still, gate stays closed so tv_wrapper's natural body-movement
# cancellation works correctly.
head_rot_now = tele_data.head_pose[:3, :3]
head_pos_now = tele_data.head_pose[:3, 3]
if head_rot_prev is not None:
rot_delta = rotation_to_rotvec(head_rot_now @ head_rot_prev.T)
angular_speed = np.linalg.norm(rot_delta) / max(dt, 0.001)
# Gate: 0 when head still (stepping), 1 when rotating (looking around)
gate = np.clip((angular_speed - 0.1) / 0.3, 0.0, 1.0)
pos_delta = head_pos_now - head_pos_prev
head_compensation += gate * pos_delta
head_rot_prev = head_rot_now.copy()
head_pos_prev = head_pos_now.copy()
# Position: robot_ref + settle_alpha * (vp_current + head_comp - vp_ref)
left_delta_pos = (tele_data.left_wrist_pose[:3, 3] + head_compensation) - vp_ref_left[:3, 3]
right_delta_pos = (tele_data.right_wrist_pose[:3, 3] + head_compensation) - vp_ref_right[:3, 3]
left_wrist_adjusted = np.eye(4)
right_wrist_adjusted = np.eye(4)

Loading…
Cancel
Save