Skill test jimmy tushar waypoint from plane #201
Draft · wants to merge 22 commits into base: main
5 changes: 5 additions & 0 deletions .gitignore
@@ -26,3 +26,8 @@ spot_rl_experiments/experiments/skill_test/logs/*
spot_rl_experiments/experiments/skill_test/table_detections/*
ros_tcp/ros_communication_client.egg-info/**
*.npy
*.csv
*.json
*.pkl
*.gz
*.pt
@@ -16,14 +16,6 @@
gripper_img_src = [SpotCamIds.HAND_COLOR]


MIN_LIN_DIST_THRESHOLD_BETWEEN_INTEL_AND_GRIPPER = (
0.115 # 13cm is approx physical dist between gripper & intel (13 - 2.5 = 11.5)
)
MAX_LIN_DIST_THRESHOLD_BETWEEN_INTEL_AND_GRIPPER = (
0.155 # 13cm is approx physical dist between gripper & intel (13 + 2.5 = 15.5)
)


def get_intel_image(spot: Spot):
return spot.get_image_responses(intel_img_src, quality=100, await_the_resp=False) # type: ignore

@@ -45,9 +37,8 @@ def get_gripper_image(spot: Spot):
gripper_intrinsics = gripper_response.source.pinhole.intrinsics
gripper_image: np.ndarray = image_response_to_cv2(gripper_response)

outputs_intel = {}
aprilposeestimator_intel: AprilTagDetectorWrapper = AprilTagDetectorWrapper()
outputs_intel = aprilposeestimator_intel._init_april_tag_detector(
aprilposeestimator_intel._init_april_tag_detector(
focal_lengths=[
intel_intrinsics.focal_length.x,
intel_intrinsics.focal_length.y,
@@ -56,12 +47,11 @@ def get_gripper_image(spot: Spot):
intel_intrinsics.principal_point.x,
intel_intrinsics.principal_point.y,
],
verbose=True,
verbose=False,
)

outputs_gripper = {}
aprilposeestimator_gripper: AprilTagDetectorWrapper = AprilTagDetectorWrapper()
outputs_gripper = aprilposeestimator_gripper._init_april_tag_detector(
aprilposeestimator_gripper._init_april_tag_detector(
focal_lengths=[
gripper_intrinsics.focal_length.x,
gripper_intrinsics.focal_length.y,
@@ -70,71 +60,31 @@ def get_gripper_image(spot: Spot):
gripper_intrinsics.principal_point.x,
gripper_intrinsics.principal_point.y,
],
verbose=True,
verbose=False,
)
prev_diff = np.zeros((4, 4), dtype=np.float32)
while True:
intel_image = get_intel_image(spot)
gripper_image = get_gripper_image(spot)
intel_image = image_response_to_cv2(intel_image.result()[0])
gripper_image = image_response_to_cv2(gripper_image.result()[0])

intel_image, intel_T_marker = aprilposeestimator_intel.process_frame(
intel_image
)
if intel_T_marker is not None:
intel_image, outputs = aprilposeestimator_intel.get_outputs(
img_frame=intel_image,
outputs=outputs_intel,
base_T_marker=intel_T_marker,
timestamp=0,
img_metadata=None,
)

gripper_image, gripper_T_marker = aprilposeestimator_gripper.process_frame(
gripper_image
)
if gripper_T_marker is not None:
gripper_image, outputs = aprilposeestimator_gripper.get_outputs(
img_frame=gripper_image,
outputs=outputs_gripper,
base_T_marker=gripper_T_marker,
timestamp=0,
img_metadata=None,
)

if intel_T_marker is not None and gripper_T_marker is not None:

# Make sure euclidean distance between gripper & intelRS is 13cm +/- 2.5 cm . (13cm is the approx physical distance)
marker_T_intel = intel_T_marker.inverse()
gripper_T_intel = gripper_T_marker * marker_T_intel
if (
np.linalg.norm(gripper_T_intel.matrix()[:3, 3])
< MAX_LIN_DIST_THRESHOLD_BETWEEN_INTEL_AND_GRIPPER
) and (
np.linalg.norm(gripper_T_intel.matrix()[:3, 3])
> MIN_LIN_DIST_THRESHOLD_BETWEEN_INTEL_AND_GRIPPER
):
print(
f"^^^^^^^^^^^^^^VALID DISTANCE^^^^^^^^^^^^^^^^^ - {np.linalg.norm(gripper_T_intel.matrix()[:3, 3])}"
)

# Wait for consistent reading between 2 frames
err = gripper_T_intel.matrix() - prev_diff
print(f"error {err} \n")
prev_diff = gripper_T_intel.matrix()
save_tf = rospy.get_param("is_save", 0) == 1
if save_tf and not np.any(err):
print("Will Save")
np.save("gripper_T_intel.npy", gripper_T_intel.matrix())
break
else:
print(
f"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx INVALID DISTANCE - {np.linalg.norm(gripper_T_intel.matrix()[:3, 3])}"
)
else:
print("Intel : ", intel_T_marker is None)
print("Gripper : ", gripper_T_marker is None)

print(
f"Check if the z is close to each other -- gripper_T_marker: {gripper_T_marker.matrix()[:3, 3]}, intel_T_marker: {intel_T_marker.matrix()[:3, 3]}"
)
cv2.imshow("QR detection", np.hstack((intel_image, gripper_image)))
marker_T_intel = intel_T_marker.inverse()
gripper_T_intel = (gripper_T_marker * marker_T_intel).matrix()
err = gripper_T_intel - prev_diff
print(f"error {err} \n")
prev_diff = gripper_T_intel
save_tf = rospy.get_param("is_save", 0) == 1
if save_tf and not np.any(err):
np.save("gripper_T_intel.npy", gripper_T_intel)
break
cv2.waitKey(1)
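For context on what the rewritten loop computes: both cameras detect the same AprilTag, so the gripper-to-Intel extrinsic is the composition of the two marker poses, and the result is only saved once two consecutive estimates agree. Below is a minimal numpy-only sketch of that composition and of the consecutive-reading check; the `relative_extrinsics` helper and the toy 4x4 matrices are illustrative stand-ins, not code from this repository.

```python
import numpy as np


def relative_extrinsics(gripper_T_marker: np.ndarray, intel_T_marker: np.ndarray) -> np.ndarray:
    """Both cameras see the same AprilTag, so the camera-to-camera transform
    is the composition gripper_T_intel = gripper_T_marker * marker_T_intel."""
    marker_T_intel = np.linalg.inv(intel_T_marker)
    return gripper_T_marker @ marker_T_intel


# Toy homogeneous poses standing in for the two detector outputs.
intel_T_marker = np.eye(4)
gripper_T_marker = np.eye(4)
gripper_T_marker[:3, 3] = [0.13, 0.0, 0.0]  # ~13 cm physical offset between the cameras

gripper_T_intel = relative_extrinsics(gripper_T_marker, intel_T_marker)
print("camera baseline (m):", np.linalg.norm(gripper_T_intel[:3, 3]))

# The loop above saves only when two consecutive estimates are identical;
# a small tolerance is a more forgiving version of the same stability check.
previous_estimate = gripper_T_intel.copy()
if np.allclose(gripper_T_intel, previous_estimate, atol=1e-6):
    np.save("gripper_T_intel.npy", gripper_T_intel)
```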
4 changes: 2 additions & 2 deletions spot_rl_experiments/configs/config.yaml
@@ -65,7 +65,7 @@ MAX_EPISODE_STEPS: 100
SUCCESS_DISTANCE: 0.3
SUCCESS_ANGLE_DIST: 5
SUCCESS_DISTANCE_FOR_DYNAMIC_YAW_NAV: 1.10
SUCCESS_ANGLE_DIST_FOR_DYNAMIC_YAW_NAV: 20
SUCCESS_ANGLE_DIST_FOR_DYNAMIC_YAW_NAV: 180
DISABLE_OBSTACLE_AVOIDANCE: False
USE_OA_FOR_NAV: True
USE_HEAD_CAMERA: True
@@ -136,7 +136,7 @@ INITIAL_ARM_JOINT_ANGLES_SEMANTIC_PLACE_LEFT_HAND: [0, -180, 180, 0, 0, -90] # T
INITIAL_ARM_JOINT_ANGLES_SEMANTIC_PLACE_TOP_DOWN: [-0.91, -92.87, 91.21, 0, 90.01, 0] # The initial orientation of the arm for top down grasping
# The old gaze ready angle: [0, -170, 120, 0, 75, 0]
GAZE_ARM_JOINT_ANGLES: [0, -160, 100, 0, 75, 0] #[0, -160, 100, 0, 90, 0]
SEMANTIC_PLACE_ARM_JOINT_ANGLES: [0, -125, 80, 0, 85, 0]
SEMANTIC_PLACE_ARM_JOINT_ANGLES: [0, -150, 80, 0, 75, 0] #[0, -125, 80, 0, 85, 0]
PLACE_ARM_JOINT_ANGLES: [0, -170, 120, 0, 75, 0]
ARM_LOWER_LIMITS: [-45, -180, 0, 0, -90, 0]
ARM_UPPER_LIMITS: [45, 0, 180, 0, 90, 0]
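Note that raising SUCCESS_ANGLE_DIST_FOR_DYNAMIC_YAW_NAV from 20 to 180 degrees effectively removes the heading criterion for dynamic-yaw navigation, since a wrapped heading error can never exceed 180 degrees. A minimal sketch of how such a threshold pair is typically applied; the `check_nav_success` helper and the inline config subset are illustrative, not the repository's actual API.

```python
# Illustrative subset of config.yaml after this change.
cfg = {
    "SUCCESS_DISTANCE_FOR_DYNAMIC_YAW_NAV": 1.10,
    "SUCCESS_ANGLE_DIST_FOR_DYNAMIC_YAW_NAV": 180,
}


def check_nav_success(dist_to_goal_m: float, heading_err_deg: float, cfg: dict) -> bool:
    """Success requires both errors under their thresholds; with the angle
    threshold at 180 deg any wrapped heading error passes, so only the
    distance term can fail."""
    heading_err_deg = abs((heading_err_deg + 180.0) % 360.0 - 180.0)  # wrap to [0, 180]
    return (
        dist_to_goal_m <= cfg["SUCCESS_DISTANCE_FOR_DYNAMIC_YAW_NAV"]
        and heading_err_deg <= cfg["SUCCESS_ANGLE_DIST_FOR_DYNAMIC_YAW_NAV"]
    )


print(check_nav_success(0.8, 135.0, cfg))  # True: heading no longer gates success
print(check_nav_success(1.5, 10.0, cfg))   # False: distance still does
```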
@@ -14,8 +14,8 @@
from spot_rl.utils.utils import ros_topics as rt
from spot_wrapper.utils import get_angle_between_two_vectors

NUM_REPEAT = 5
WAYPOINT_TEST = [[1.8, 1.1]] * NUM_REPEAT # x, y
NUM_REPEAT = 1
WAYPOINT_TEST = [[4.5, 3.4, -0.0]] * NUM_REPEAT # x, y, yaw


class SpotRosSkillExecutor:
@@ -65,8 +65,8 @@ def benchmark(self):
self.spotskillmanager = SpotSkillManager(
use_mobile_pick=True, use_semantic_place=False
)
x, y = waypoint
suc, _ = self.spotskillmanager.nav(x, y)
x, y, yaw = waypoint
suc, _ = self.spotskillmanager.nav(x, y, np.deg2rad(yaw))
# Compute the metrics
traj = (
self.spotskillmanager.nav_controller.get_most_recent_result_log().get(
@@ -76,6 +76,7 @@
metrics = self.compute_metrics(traj, np.array([x, y]))
metrics["suc"] = suc
metrics_list.append(metrics)
breakpoint()
# Reset
self.spotskillmanager.dock()

@@ -0,0 +1,108 @@
# Copyright (c) Meta Platforms, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import argparse
import os
import os.path as osp
import time

import magnum as mn
import numpy as np
import rospy
from spot_rl.envs.skill_manager import SpotSkillManager
from spot_rl.utils.utils import ros_topics as rt
from spot_wrapper.utils import get_angle_between_two_vectors

NUM_REPEAT = 2
WAYPOINT_TEST = [[5.4, 2.5]] * NUM_REPEAT  # x, y


class SpotRosSkillExecutor:
    """This class reads the ros buffer to execute skills"""

    def __init__(self):
        self.spotskillmanager = None
        self._cur_skill_name_input = None

    def compute_metrics(self, traj, target_point):
        """Compute the metrics"""
        num_steps = len(traj)
        final_point = np.array(traj[-1]["pose"][0:2])
        distance = np.linalg.norm(target_point - final_point)
        # Compute the angle
        vector_robot_to_target = target_point - final_point
        vector_robot_to_target = vector_robot_to_target / np.linalg.norm(
            vector_robot_to_target
        )
        vector_forward_robot = np.array(
            self.spotskillmanager.get_env().curr_transform.transform_vector(
                mn.Vector3(1, 0, 0)
            )
        )[[0, 1]]
        vector_forward_robot = vector_forward_robot / np.linalg.norm(
            vector_forward_robot
        )
        dot_product_facing_target = abs(
            np.dot(vector_robot_to_target, vector_forward_robot)
        )
        angle_facing_target = abs(
            get_angle_between_two_vectors(vector_robot_to_target, vector_forward_robot)
        )
        return {
            "num_steps": num_steps,
            "distance": distance,
            "dot_product_facing_target": dot_product_facing_target,
            "angle_facing_target": angle_facing_target,
        }

    def benchmark(self):
        """Run the benchmark code to test skills"""

        metrics_list = []
        for waypoint in WAYPOINT_TEST:
            # Call the skill manager
            self.spotskillmanager = SpotSkillManager(
                use_mobile_pick=True, use_semantic_place=False
            )
            x, y = waypoint
            suc, _ = self.spotskillmanager.nav(x, y)
            # Compute the metrics
            traj = (
                self.spotskillmanager.nav_controller.get_most_recent_result_log().get(
                    "robot_trajectory"
                )
            )
            metrics = self.compute_metrics(traj, np.array([x, y]))
            metrics["nav_suc"] = suc
            if suc:  # only attempt the pick if nav succeeded
                suc, _ = self.spotskillmanager.pick("can")
            else:
                suc = False
            time.sleep(0.5)
            self.spotskillmanager.spot.open_gripper()
            time.sleep(0.5)
            metrics["pick_suc"] = suc
            metrics_list.append(metrics)
            # Reset
            self.spotskillmanager.dock()

        # Compute the final number
        for mm in metrics:
            data = [metrics[mm] for metrics in metrics_list]
            _mean = round(np.mean(data), 2)
            _std = round(np.std(data), 2)
            print(f"{mm}: {_mean} +- {_std}")


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--useful_parameters", action="store_true")
    _ = parser.parse_args()

    executor = SpotRosSkillExecutor()
    executor.benchmark()


if __name__ == "__main__":
    main()
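For intuition on what compute_metrics reports, here is a self-contained numpy-only sketch of the same distance and facing-angle calculation on a made-up final pose; the `facing_metrics` helper, the toy pose, and the degrees-based angle are illustrative stand-ins rather than the spot_wrapper utilities used above.

```python
import numpy as np


def facing_metrics(final_xy, final_heading_rad, target_xy):
    """Distance to the target plus how directly the robot faces it,
    mirroring the dot-product / angle metrics in compute_metrics."""
    to_target = np.asarray(target_xy, dtype=float) - np.asarray(final_xy, dtype=float)
    distance = np.linalg.norm(to_target)
    to_target = to_target / distance
    forward = np.array([np.cos(final_heading_rad), np.sin(final_heading_rad)])
    dot_raw = float(np.dot(to_target, forward))
    return {
        "distance": distance,
        "dot_product_facing_target": abs(dot_raw),
        "angle_facing_target": np.degrees(np.arccos(np.clip(dot_raw, -1.0, 1.0))),
    }


# Hypothetical end-of-trajectory pose versus the [5.4, 2.5] waypoint used above.
print(facing_metrics(final_xy=[5.1, 2.3], final_heading_rad=np.deg2rad(30), target_xy=[5.4, 2.5]))
```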