Commit 0f82df9: adds pouring skill
NishanthJKumar committed May 28, 2024 (1 parent: c4b6552)
Showing 1 changed file with 55 additions and 30 deletions.
predicators/spot_utils/sample_and_move.py
@@ -119,37 +119,62 @@ def reward_function(input_traj: List[Tuple[float, float]]) -> float:

# relative arm move example.
target_pos = math_helpers.Vec3(0.8, 0.0, 0.25)
downward_angle = np.pi / 2.5
rot = math_helpers.Quat.from_pitch(downward_angle)
body_tform_goal = math_helpers.SE3Pose(x=target_pos.x,
                                       y=target_pos.y,
                                       z=target_pos.z,
                                       rot=rot)
move_hand_to_relative_pose(robot, body_tform_goal)
time.sleep(0.5)

# grasping example.
rgbds = capture_images(robot, localizer, TEST_CAMERAS)
language_ids: List[ObjectDetectionID] = [
    LanguageObjectDetectionID(d) for d in TEST_LANGUAGE_DESCRIPTIONS
]
hand_camera = "hand_color_image"
detections, artifacts = detect_objects(language_ids, rgbds)

detections_outfile = Path(".") / "object_detection_artifacts.png"
no_detections_outfile = Path(".") / "no_detection_artifacts.png"
visualize_all_artifacts(artifacts, detections_outfile,
                        no_detections_outfile)

# Automatically get grasp pixel via vision
# pixel, _ = get_grasp_pixel(rgbds, artifacts, language_ids[-1],
#                            hand_camera, rng)
# Get grasp pixel via user query.
pixel = get_pixel_from_user(rgbds[hand_camera].rgb)
grasp_at_pixel(robot, rgbds[hand_camera], pixel)
stow_arm(robot)
# downward_angle = np.pi / 2.5
# rot = math_helpers.Quat.from_pitch(downward_angle)
# body_tform_goal = math_helpers.SE3Pose(x=target_pos.x,
#                                        y=target_pos.y,
#                                        z=target_pos.z,
#                                        rot=rot)
# move_hand_to_relative_pose(robot, body_tform_goal)
# time.sleep(0.5)

# # grasping example.
# rgbds = capture_images(robot, localizer, TEST_CAMERAS)
# language_ids: List[ObjectDetectionID] = [
#     LanguageObjectDetectionID(d) for d in TEST_LANGUAGE_DESCRIPTIONS
# ]
# hand_camera = "hand_color_image"
# detections, artifacts = detect_objects(language_ids, rgbds)

# detections_outfile = Path(".") / "object_detection_artifacts.png"
# no_detections_outfile = Path(".") / "no_detection_artifacts.png"
# visualize_all_artifacts(artifacts, detections_outfile,
#                         no_detections_outfile)

# # Automatically get grasp pixel via vision
# # pixel, _ = get_grasp_pixel(rgbds, artifacts, language_ids[-1],
# #                            hand_camera, rng)
# # Get grasp pixel via user query.
# pixel = get_pixel_from_user(rgbds[hand_camera].rgb)
# grasp_at_pixel(robot, rgbds[hand_camera], pixel)
# stow_arm(robot)

# API notes:
# navigate_to_absolute_pose(robot, localizer, se2_pose(x, y, theta)) -> moves
#   the body to a waypoint; x and y are in the map frame, and theta is the
#   robot's heading.
# move_hand_to_relative_pose(robot, se3_pose(x, y, z, quaternion)) -> moves
#   the hand relative to the body; x is forward, y is left, and z is up.
# get_grasp_pixel(rgbds, artifacts, object_id, camera_name, rng) -> returns a
#   pixel to grasp.
# grasp_at_pixel(robot, rgbds[hand_camera], pixel) -> grasps at that pixel.
# A minimal sketch chaining these calls appears below.
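# The sketch assumes the robot, localizer, rng, language_ids, and hand_camera
# objects defined above; the waypoint coordinates are illustrative
# placeholders.
# waypoint = math_helpers.SE2Pose(x=1.0, y=0.0, angle=0.0)
# navigate_to_absolute_pose(robot, localizer, waypoint)
# rgbds = capture_images(robot, localizer, TEST_CAMERAS)
# detections, artifacts = detect_objects(language_ids, rgbds)
# pixel, _ = get_grasp_pixel(rgbds, artifacts, language_ids[-1], hand_camera,
#                            rng)
# grasp_at_pixel(robot, rgbds[hand_camera], pixel)
# stow_arm(robot)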




# Pouring skill.
def pour_at_relative_pose(robot: Robot, rel_pos: math_helpers.Vec3) -> None:
    """Assuming the robot is holding something, execute a pour."""
    # First move the hand to the target pose, with the gripper pitched
    # straight down.
    init_rot = math_helpers.Quat.from_pitch(np.pi / 2)
    init_pose = math_helpers.SE3Pose(x=rel_pos.x,
                                     y=rel_pos.y,
                                     z=rel_pos.z,
                                     rot=init_rot)
    move_hand_to_relative_pose(robot, init_pose)
    time.sleep(0.5)
    # Rotate the wrist to tip the held container and pour.
    pouring_rot = init_rot * math_helpers.Quat.from_yaw(np.pi / 4.0)
    pouring_pose = math_helpers.SE3Pose(x=rel_pos.x,
                                        y=rel_pos.y,
                                        z=rel_pos.z,
                                        rot=pouring_rot)
    move_hand_to_relative_pose(robot, pouring_pose)
    time.sleep(0.5)
    # Rotate back to the initial pose to stop pouring.
    move_hand_to_relative_pose(robot, init_pose)

pour_at_relative_pose(robot, target_pos)
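# Design note: quaternion composition applies the right-hand factor in the
# frame of the left-hand one, so init_rot * Quat.from_yaw(np.pi / 4.0) tips
# the downward-pitched gripper about its own axis rather than the body's
# z-axis. Moving back to init_pose afterwards returns the container upright.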
