Browse Source

EMA head smoothing + increase rotation tracking to 0.7

Fix 1: Replace frozen head_ref with EMA-smoothed head position.
Fast head movements (looking around) are compensated instantly,
but slow body movements (stepping) are absorbed by the EMA filter
so robot arms stay stable. --head-smooth-alpha 0.02 (default).

Fix 2: Increase --rot-blend default from 0.3 to 0.7. With the
rotation I-term now active, we can track 70% of wrist rotation
instead of 30%, producing more natural wrist poses while the
I-term corrects any residual drift.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
main
Joe DiPrima 1 month ago
parent
commit
993ba628d5
1 changed file · 30 lines changed
    teleop/teleop_hand_and_arm.py

@@ -154,8 +154,10 @@ if __name__ == '__main__':
                         help='Disable start-time calibration (use absolute VP poses)')
     parser.add_argument('--settle-time', type=float, default=0.5,
                         help='Seconds to ramp tracking after calibration (default: 0.5)')
-    parser.add_argument('--rot-blend', type=float, default=0.3,
-                        help='Rotation tracking blend (0=position-only, 1=full rotation, default: 0.3)')
+    parser.add_argument('--rot-blend', type=float, default=0.7,
+                        help='Rotation tracking blend (0=position-only, 1=full rotation, default: 0.7)')
+    parser.add_argument('--head-smooth-alpha', type=float, default=0.02,
+                        help='EMA alpha for head position smoothing (0=frozen, 1=no smoothing, default: 0.02)')
     # record mode and task info
     parser.add_argument('--record', action = 'store_true', help = 'Enable data recording mode')
     parser.add_argument('--task-dir', type = str, default = './utils/data/', help = 'path to save data')
@@ -354,7 +356,7 @@ if __name__ == '__main__':
     vp_ref_right = None
     robot_ref_left = None   # 4x4: Robot FK pose at calibration moment
     robot_ref_right = None
-    head_ref = None         # 3-vec: Head position at calibration (for decoupling)
+    head_smooth = None      # 3-vec: EMA of head position (for decoupling)
     prev_start = False

     # Initial calibration: capture reference poses on first START
@@ -364,13 +366,13 @@ if __name__ == '__main__':
         robot_ref_left, robot_ref_right = arm_ik.compute_fk_pose(_cal_arm_q)
         vp_ref_left = _cal_tele.left_wrist_pose.copy()
         vp_ref_right = _cal_tele.right_wrist_pose.copy()
-        head_ref = _cal_tele.head_pose[:3, 3].copy()
+        head_smooth = _cal_tele.head_pose[:3, 3].copy()
         calibrated = True
         calibration_time = time.time()
         prev_start = True
         logger_mp.info(f"[calibration] Initial reference captured. "
                        f"Robot L={robot_ref_left[:3,3]}, R={robot_ref_right[:3,3]} "
-                       f"Head={head_ref}")
+                       f"Head={head_smooth}")

     # main loop. robot start to follow VR user's motion
     while not STOP:
@@ -426,7 +428,7 @@ if __name__ == '__main__':
                 robot_ref_left, robot_ref_right = arm_ik.compute_fk_pose(_cal_arm_q)
                 vp_ref_left = _cal_tele.left_wrist_pose.copy()
                 vp_ref_right = _cal_tele.right_wrist_pose.copy()
-                head_ref = _cal_tele.head_pose[:3, 3].copy()
+                head_smooth = _cal_tele.head_pose[:3, 3].copy()
                 calibrated = True
                 calibration_time = time.time()
                 i_error_left = np.zeros(3)
@@ -525,14 +527,16 @@ if __name__ == '__main__':
                 settle_elapsed = now - calibration_time
                 settle_alpha = min(1.0, settle_elapsed / max(args.settle_time, 0.01))

-                # Head decoupling: tv_wrapper subtracts live head position from wrists.
-                # Add back head drift so only hand movement (not head movement) drives arms.
-                head_drift = tele_data.head_pose[:3, 3] - head_ref
+                # Head decoupling via EMA: tv_wrapper subtracts live head position from wrists.
+                # We compensate only FAST head movement (looking around), not slow body movement
+                # (stepping). head_smooth tracks body position; head_fast = head_now - head_smooth.
+                head_smooth = (head_smooth * (1 - args.head_smooth_alpha)
+                               + tele_data.head_pose[:3, 3] * args.head_smooth_alpha)
+                head_fast = tele_data.head_pose[:3, 3] - head_smooth

-                # Position: robot_ref + settle_alpha * (vp_current + head_drift - vp_ref)
-                # head_drift cancels tv_wrapper's live head subtraction, replacing it with fixed ref
-                left_delta_pos = (tele_data.left_wrist_pose[:3, 3] + head_drift) - vp_ref_left[:3, 3]
-                right_delta_pos = (tele_data.right_wrist_pose[:3, 3] + head_drift) - vp_ref_right[:3, 3]
+                # Position: robot_ref + settle_alpha * (vp_current + head_fast - vp_ref)
+                left_delta_pos = (tele_data.left_wrist_pose[:3, 3] + head_fast) - vp_ref_left[:3, 3]
+                right_delta_pos = (tele_data.right_wrist_pose[:3, 3] + head_fast) - vp_ref_right[:3, 3]
                 left_wrist_adjusted = np.eye(4)
                 right_wrist_adjusted = np.eye(4)


Loading…
Cancel
Save