diff --git a/habitat-lab/habitat/articulated_agent_controllers/__init__.py b/habitat-lab/habitat/articulated_agent_controllers/__init__.py
index 5b673ff2dc..207bc2088a 100644
--- a/habitat-lab/habitat/articulated_agent_controllers/__init__.py
+++ b/habitat-lab/habitat/articulated_agent_controllers/__init__.py
@@ -4,10 +4,20 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
+from habitat.articulated_agent_controllers.humanoid_base_controller import (
+    HumanoidBaseController,
+    Motion,
+    Pose,
+)
 from habitat.articulated_agent_controllers.humanoid_rearrange_controller import (
     HumanoidRearrangeController,
 )
+from habitat.articulated_agent_controllers.humanoid_seq_pose_controller import (
+    HumanoidSeqPoseController,
+)
 
 __all__ = [
+    "HumanoidBaseController",
     "HumanoidRearrangeController",
+    "HumanoidSeqPoseController",
 ]
diff --git a/habitat-lab/habitat/articulated_agent_controllers/humanoid_base_controller.py b/habitat-lab/habitat/articulated_agent_controllers/humanoid_base_controller.py
new file mode 100644
index 0000000000..3ec013300e
--- /dev/null
+++ b/habitat-lab/habitat/articulated_agent_controllers/humanoid_base_controller.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+
+# Copyright (c) Meta Platforms, Inc. and its affiliates.
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+
+import magnum as mn
+import numpy as np
+
+
+# TODO: The implementation here assumes an SMPL-X representation of humanoids,
+# where all joints are represented as quaternions. In future PRs we need
+# to abstract this class to support other kinds of joints.
+class Pose:
+    def __init__(self, joints_quat, root_transform):
+        """
+        Contains a single humanoid pose
+        :param joints_quat: list or array of num_joints * 4 elements, with the rotation quaternions
+        :param root_transform: Matrix4 with the root transform.
+        """
+        self.joints = list(joints_quat)
+        self.root_transform = root_transform
+
+
+class Motion:
+    """
+    Contains a sequential motion, corresponding to a sequence of poses
+    :param joints_quat_array: num_poses x num_joints x 4 array, containing the joint orientations
+    :param transform_array: num_poses x 4 x 4 array, containing the root transform
+    :param displacement: the forward displacement accumulated at each pose,
+        used to measure how many poses we should advance to move a certain amount
+    :param fps: the FPS at which the motion was recorded
+    """
+
+    def __init__(self, joints_quat_array, transform_array, displacement, fps):
+        num_poses = joints_quat_array.shape[0]
+        self.num_poses = num_poses
+        poses = []
+        for index in range(num_poses):
+            pose = Pose(
+                joints_quat_array[index].reshape(-1),
+                mn.Matrix4(transform_array[index]),
+            )
+            poses.append(pose)
+
+        self.poses = poses
+        self.fps = fps
+        self.displacement = displacement
+
+
+class HumanoidBaseController:
+    """
+    Generic class to replay SMPL-X motions
+    :param motion_fps: the FPS at which we should be playing the motion.
+    :param base_offset: what is the offset between the root of the character and their feet.
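+
+    Calling `get_pose()` flattens the controller state into a single list:
+    the joint rotation quaternions (4 values per joint), followed by the 16
+    values of the transposed offset transform and the 16 values of the
+    transposed base transform.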
+    """
+
+    def __init__(
+        self,
+        motion_fps=30,
+        base_offset=(0, 0.9, 0),
+    ):
+        self.base_offset = mn.Vector3(base_offset)
+        self.motion_fps = motion_fps
+
+        # These two matrices store the global transformation of the base
+        # as well as the transformation caused by the walking gait
+        # We initialize them to identity
+        self.obj_transform_offset = mn.Matrix4()
+        self.obj_transform_base = mn.Matrix4()
+        self.joint_pose = []
+
+    def reset(self, base_transformation: mn.Matrix4) -> None:
+        """Reset the joints on the human. (Put in rest state)"""
+        self.obj_transform_offset = mn.Matrix4()
+        self.obj_transform_base = base_transformation
+        self.prev_orientation = base_transformation.transform_vector(
+            mn.Vector3(1.0, 0.0, 0.0)
+        )
+
+    def get_pose(self):
+        """
+        Obtains the controller joints, offset and base transform in a vectorized form so that it can be passed
+        as an argument to HumanoidJointAction
+        """
+        obj_trans_offset = np.asarray(
+            self.obj_transform_offset.transposed()
+        ).flatten()
+        obj_trans_base = np.asarray(
+            self.obj_transform_base.transposed()
+        ).flatten()
+        return self.joint_pose + list(obj_trans_offset) + list(obj_trans_base)
diff --git a/habitat-lab/habitat/articulated_agent_controllers/humanoid_rearrange_controller.py b/habitat-lab/habitat/articulated_agent_controllers/humanoid_rearrange_controller.py
index 88010c0ce3..1153e80267 100644
--- a/habitat-lab/habitat/articulated_agent_controllers/humanoid_rearrange_controller.py
+++ b/habitat-lab/habitat/articulated_agent_controllers/humanoid_rearrange_controller.py
@@ -11,43 +11,11 @@
 import magnum as mn
 import numpy as np
 
-
-class Pose:
-    def __init__(self, joints_quat, root_transform):
-        """
-        Contains a single humanoid pose
-        :param joints_quat: list or array of num_joints * 4 elements, with the rotation quaternions
-        :param root_transform: Matrix4 with the root trasnform.
-        """
-        self.joints = list(joints_quat)
-        self.root_transform = root_transform
-
-
-class Motion:
-    """
-    Contains a sequential motion, corresponding to a sequence of poses
-    :param joints_quat_array: num_poses x num_joints x 4 array, containing the join orientations
-    :param transform_array: num_poses x 4 x 4 array, containing the root transform
-    :param displacement: on each pose, how much forward displacement was there?
-        Used to measure how many poses we should advance to move a cerain amount
-    :param fps: the FPS at which the motion was recorded
-    """
-
-    def __init__(self, joints_quat_array, transform_array, displacement, fps):
-        num_poses = joints_quat_array.shape[0]
-        self.num_poses = num_poses
-        poses = []
-        for index in range(num_poses):
-            pose = Pose(
-                joints_quat_array[index].reshape(-1),
-                mn.Matrix4(transform_array[index]),
-            )
-            poses.append(pose)
-
-        self.poses = poses
-        self.fps = fps
-        self.displacement = displacement
-
+from habitat.articulated_agent_controllers import (
+    HumanoidBaseController,
+    Motion,
+    Pose,
+)
 
 MIN_ANGLE_TURN = 5  # If we turn less than this amount, we can just rotate the base and keep walking motion the same as if we had not rotated
 TURNING_STEP_AMOUNT = (
@@ -59,24 +27,26 @@ def __init__(self, joints_quat_array, transform_array, displacement, fps):
 )
 
 
-class HumanoidRearrangeController:
+class HumanoidRearrangeController(HumanoidBaseController):
     """
     Humanoid Controller, converts high level actions such as walk, or reach into joints positions
     :param walk_pose_path: file containing the walking poses we care about.
-    :param draw_fps: the FPS at which we should be advancing the pose.
+    :param motion_fps: the FPS at which we should be advancing the pose.
     :base_offset: what is the offset between the root of the character and their feet.
     """
 
     def __init__(
         self,
         walk_pose_path,
-        draw_fps=30,
+        motion_fps=30,
         base_offset=(0, 0.9, 0),
     ):
+        self.obj_transform_base = mn.Matrix4()
+        super().__init__(motion_fps, base_offset)
+
         self.min_angle_turn = MIN_ANGLE_TURN
         self.turning_step_amount = TURNING_STEP_AMOUNT
         self.threshold_rotate_not_move = THRESHOLD_ROTATE_NOT_MOVE
-        self.base_offset = mn.Vector3(base_offset)
 
         if not os.path.isfile(walk_pose_path):
             raise RuntimeError(
@@ -98,18 +68,11 @@ def __init__(
             walk_data["stop_pose"]["joints"].reshape(-1),
             mn.Matrix4(walk_data["stop_pose"]["transform"]),
         )
-        self.draw_fps = draw_fps
+        self.motion_fps = motion_fps
         self.dist_per_step_size = (
             self.walk_motion.displacement[-1] / self.walk_motion.num_poses
         )
 
-        # These two matrices store the global transformation of the base
-        # as well as the transformation caused by the walking gait
-        # We initialize them to identity
-        self.obj_transform_offset = mn.Matrix4()
-        self.obj_transform_base = mn.Matrix4()
-        self.joint_pose = []
-
         self.prev_orientation = None
         self.walk_mocap_frame = 0
 
@@ -147,20 +110,12 @@ def set_framerate_for_linspeed(self, lin_speed, ang_speed, ctrl_freq):
         seconds_per_step = 1.0 / ctrl_freq
         meters_per_step = lin_speed * seconds_per_step
         frames_per_step = meters_per_step / self.dist_per_step_size
-        self.draw_fps = self.walk_motion.fps / frames_per_step
+        self.motion_fps = self.walk_motion.fps / frames_per_step
         rotate_amount = ang_speed * seconds_per_step
         rotate_amount = rotate_amount * 180.0 / np.pi
         self.turning_step_amount = rotate_amount
         self.threshold_rotate_not_move = rotate_amount
 
-    def reset(self, base_transformation) -> None:
-        """Reset the joints on the human. (Put in rest state)"""
-        self.obj_transform_offset = mn.Matrix4()
-        self.obj_transform_base = base_transformation
-        self.prev_orientation = base_transformation.transform_vector(
-            mn.Vector3(1.0, 0.0, 0.0)
-        )
-
     def calculate_stop_pose(self):
         """
         Calculates a stop, standing pose
@@ -226,7 +181,7 @@ def calculate_walk_pose(
             self.prev_orientation = forward_V
 
         # Step size according to the FPS
-        step_size = int(self.walk_motion.fps / self.draw_fps)
+        step_size = int(self.walk_motion.fps / self.motion_fps)
 
         if did_rotate:
             # When we rotate, we allow some movement
@@ -493,16 +448,3 @@ def calculate_reach_pose(self, obj_pos: mn.Vector3, index_hand=0):
         )
 
         self.joint_pose = curr_poses
-
-    def get_pose(self):
-        """
-        Obtains the controller joints, offset and base transform in a vectorized form so that it can be passed
-        as an argument to HumanoidJointAction
-        """
-        obj_trans_offset = np.asarray(
-            self.obj_transform_offset.transposed()
-        ).flatten()
-        obj_trans_base = np.asarray(
-            self.obj_transform_base.transposed()
-        ).flatten()
-        return self.joint_pose + list(obj_trans_offset) + list(obj_trans_base)
diff --git a/habitat-lab/habitat/articulated_agent_controllers/humanoid_seq_pose_controller.py b/habitat-lab/habitat/articulated_agent_controllers/humanoid_seq_pose_controller.py
new file mode 100644
index 0000000000..0bcaceea80
--- /dev/null
+++ b/habitat-lab/habitat/articulated_agent_controllers/humanoid_seq_pose_controller.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+
+# Copyright (c) Meta Platforms, Inc. and its affiliates.
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+import os
+
+import magnum as mn
+import numpy as np
+
+from habitat.articulated_agent_controllers import (
+    HumanoidBaseController,
+    Motion,
+)
+
+
+class HumanoidSeqPoseController(HumanoidBaseController):
+    """
+    Humanoid Seq Pose Controller: replays a sequence of humanoid poses.
+    :param motion_pose_path: file containing the motion poses we want to play.
+    :param motion_fps: the FPS at which we should be advancing the pose.
+    :param base_offset: what is the offset between the root of the character and their feet.
+    """
+
+    def __init__(
+        self,
+        motion_pose_path,
+        motion_fps=30,
+        base_offset=(0, 0.9, 0),
+    ):
+        super().__init__(motion_fps, base_offset)
+
+        if not os.path.isfile(motion_pose_path):
+            raise RuntimeError(
+                f"Path {motion_pose_path} does not exist. Reach out to the paper authors to obtain this data."
+            )
+
+        motion_info = np.load(motion_pose_path, allow_pickle=True)
+        motion_info = motion_info["pose_motion"]
+        self.humanoid_motion = Motion(
+            motion_info["joints_array"],
+            motion_info["transform_array"],
+            motion_info["displacement"],
+            motion_info["fps"],
+        )
+        self.motion_frame = 0
+        self.ref_pose = mn.Matrix4()
+        self.first_pose = mn.Matrix4(
+            self.humanoid_motion.poses[0].root_transform
+        )
+        # Clamp to at least one motion frame per step so that cycling still
+        # advances when the motion FPS is lower than the controller FPS
+        self.step_size = max(
+            1, int(self.humanoid_motion.fps / self.motion_fps)
+        )
+
+    def reset(self, base_transformation: mn.Matrix4) -> None:
+        """Reset the joints on the human. (Put in rest state)"""
+        super().reset(base_transformation)
+        self.motion_frame = 0
+
+        # The first pose of the motion file will be set at base_transformation
+        self.ref_pose = base_transformation
+
+        self.calculate_pose()
+
+    def next_pose(self, cycle=False) -> None:
+        """
+        Move to the next pose in the motion sequence
+        :param cycle: boolean indicating whether we should stop or cycle when reaching the last pose
+        """
+
+        if cycle:
+            self.motion_frame = (
+                self.motion_frame + self.step_size
+            ) % self.humanoid_motion.num_poses
+        else:
+            self.motion_frame = min(
+                self.motion_frame + 1, self.humanoid_motion.num_poses - 1
+            )
+
+    def prev_pose(self, cycle=False) -> None:
+        """
+        Move to the previous pose in the motion sequence
+        :param cycle: boolean indicating whether we should stop or cycle when reaching the first pose
+        """
+
+        if cycle:
+            self.motion_frame = (
+                self.motion_frame - self.step_size
+            ) % self.humanoid_motion.num_poses
+        else:
+            self.motion_frame = max(0, self.motion_frame - 1)
+
+    def calculate_pose(self, advance_pose=False) -> None:
+        """
+        Set the joint transforms according to the current frame
+        :param advance_pose: whether this function should move to the next pose
+        """
+        curr_transform = mn.Matrix4(
+            self.humanoid_motion.poses[self.motion_frame].root_transform
+        )
+        curr_transform.translation = (
+            curr_transform.translation
+            - self.first_pose.translation
+            + self.ref_pose.translation
+        )
+        # Lower the root by the configured base offset so that the
+        # character's feet rest on the ground
+        curr_transform.translation = curr_transform.translation - self.base_offset
+        curr_poses = self.humanoid_motion.poses[self.motion_frame].joints
+        self.obj_transform_offset = curr_transform
+        self.joint_pose = curr_poses
+
+        if advance_pose:
+            self.next_pose()
diff --git a/habitat-lab/habitat/articulated_agents/humanoids/README.md b/habitat-lab/habitat/articulated_agents/humanoids/README.md
index ddfff0b823..7c40339c09 100644
--- a/habitat-lab/habitat/articulated_agents/humanoids/README.md
+++ b/habitat-lab/habitat/articulated_agents/humanoids/README.md
@@ -1,7 +1,7 @@
 Humanoid Design
 ==============================
 
-Habitat also supports humanoid avatars. These avatars are represented as articulated objects connected via spherical or fixed joints. The current version only includes a kinematic humanoid, which can be controlled by specifying the joint rotations and object transformation.
+Habitat also supports humanoid avatars. These avatars are represented as articulated objects connected via spherical or fixed joints. The current version only includes a kinematic humanoid, which can be controlled by specifying the joint rotations and object transformations.
 
 You can download the humanoid avatar by running:
 
@@ -19,7 +19,7 @@ From the home directory.
 - `kinematic_humanoid.py` is built upon `mobile_manipulator.py` and can be controlled by setting the joint and object transforms, via the function `set_joint_transform`.
 
 1. **Mobile Manipulator Parameters**
-    - Parameters such as the camera's transformation, end-effector position, control gain, and more are specified in each robot class (e.g., here, `kinematic_humanoid.py`).
+    - Parameters such as the camera's transformation, end-effector position, control gain, and more are specified in each humanoid class (e.g., here, `kinematic_humanoid.py`).
 
 1. **Humanoid Articulated Object**
 
@@ -32,3 +32,71 @@ You can test a simple demo where the humanoid moves their arms at random using:
 ```
 python examples/interactive_play.py --never-end --disable-inverse-kinematics --control-humanoid --cfg benchmark/rearrange/play_human.yaml
 ```
+
+## Controlling the humanoid
+
+While you can control the humanoid avatars by directly setting joint rotations, you may be interested in easily generating realistic humanoid motion. We provide a set of controllers that convert high-level commands into low-level controls. We currently provide two controllers, as described below:
+
+### HumanoidRearrangeController
+
+The [HumanoidRearrangeController](../../articulated_agent_controllers/humanoid_rearrange_controller.py) is designed to drive the humanoid to navigate around a scene or pick and place objects. This controller is used for the Social Navigation and Social Rearrangement tasks. It can generate walk-cycle animations that drive the agent to particular object positions, as well as reaching motions, which change the agent's pose so that the right or left hand reaches a specific relative position. You can test the controller by running:
+
+```
+python -m pytest test/test_humanoid.py::test_humanoid_controller
+```
+This will generate an animation of a human walking on an empty plane.
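+
+For reference, here is a minimal sketch of driving this controller from Python. It mirrors the flow used in `test/test_humanoid.py`; note that the exact signature of `calculate_walk_pose` is an assumption here (we pass the target position to walk towards):
+
+```python
+import magnum as mn
+
+from habitat.articulated_agent_controllers import HumanoidRearrangeController
+
+walk_pose_path = "data/humanoids/humanoid_data/female_2/female_2_motion_data_smplx.pkl"
+controller = HumanoidRearrangeController(walk_pose_path)
+controller.reset(mn.Matrix4())  # start from an identity base transform
+controller.calculate_walk_pose(mn.Vector3(2.0, 0.0, 0.0))  # advance the walk cycle
+pose = controller.get_pose()  # joint quaternions + offset and base transforms
+```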
+
+### HumanoidSeqPoseController
+
+The [HumanoidSeqPoseController](../../articulated_agent_controllers/humanoid_seq_pose_controller.py) is designed to replay a pre-computed motion data file in a feed-forward manner. Such motions could originate, for example, from motion capture, an animation system, or a motion generation model. You can test the controller by running:
+
+
+```
+python -m pytest test/test_humanoid.py::test_humanoid_seqpose_controller
+```
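+
+Below is a minimal sketch of replaying a converted motion file with this controller, again following `test/test_humanoid.py` (the `.pkl` file is produced by the conversion script described next):
+
+```python
+import magnum as mn
+
+from habitat.articulated_agent_controllers import HumanoidSeqPoseController
+
+motion_path = "data/humanoids/humanoid_data/walk_motion/CMU_10_04_stageii.pkl"
+controller = HumanoidSeqPoseController(motion_path)
+controller.reset(mn.Matrix4())  # the first motion pose is placed at this transform
+for _ in range(100):
+    controller.calculate_pose()  # compute the joint pose for the current frame
+    controller.next_pose(cycle=True)  # advance, looping back at the end
+    pose = controller.get_pose()
+```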
+
+We also provide [a script](../../../habitat/utils/humanoid_utils.py) to convert motion from the SMPL-X format into a file that can be played by our controllers. You can run it as follows:
+
+```python
+from habitat.utils.humanoid_utils import MotionConverterSMPLX
+
+PATH_TO_URDF = "data/humanoids/humanoid_data/female_2/female_2.urdf"
+PATH_TO_MOTION_NPZ = "data/humanoids/humanoid_data/walk_motion/CMU_10_04_stageii.npz"
+convert_helper = MotionConverterSMPLX(urdf_path=PATH_TO_URDF)
+convert_helper.convert_motion_file(
+    motion_path=PATH_TO_MOTION_NPZ,
+    output_path=PATH_TO_MOTION_NPZ.replace(".npz", ""),
+)
+```
+
+Here, `PATH_TO_MOTION_NPZ` contains the motion in SMPL-X format (e.g. from AMASS or [Motion Diffusion Models](https://github.com/GuyTevet/motion-diffusion-model)), and the output file will be a `.pkl` that can be fed to the HumanoidSeqPoseController.
diff --git a/habitat-lab/habitat/utils/humanoid_utils.py b/habitat-lab/habitat/utils/humanoid_utils.py
new file mode 100644
index 0000000000..28510f83db
--- /dev/null
+++ b/habitat-lab/habitat/utils/humanoid_utils.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python3
+
+# Copyright (c) Meta Platforms, Inc. and its affiliates.
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+import glob
+import pickle as pkl
+
+import magnum as mn
+import numpy as np
+import pybullet as p
+
+smplx_body_joint_names = [
+    "pelvis",
+    "left_hip",
+    "right_hip",
+    "spine1",
+    "left_knee",
+    "right_knee",
+    "spine2",
+    "left_ankle",
+    "right_ankle",
+    "spine3",
+    "left_foot",
+    "right_foot",
+    "neck",
+    "left_collar",
+    "right_collar",
+    "head",
+    "left_shoulder",
+    "right_shoulder",
+    "left_elbow",
+    "right_elbow",
+    "left_wrist",
+    "right_wrist",
+]
+
+
+def global_correction_quat(
+    up_v: mn.Vector3, forward_v: mn.Vector3
+) -> mn.Quaternion:
+    """
+    Given the upward direction and the forward direction of a local space frame, this method produces
+    the correction quaternion to convert the frame to global space (+Y up, -Z forward).
+    """
+    if up_v.normalized() != mn.Vector3.y_axis():
+        angle1 = mn.math.angle(up_v.normalized(), mn.Vector3.y_axis())
+        axis1 = mn.math.cross(up_v.normalized(), mn.Vector3.y_axis())
+        rotation1 = mn.Quaternion.rotation(angle1, axis1)
+        forward_v = rotation1.transform_vector(forward_v)
+    else:
+        rotation1 = mn.Quaternion()
+
+    forward_v = forward_v * (mn.Vector3(1.0, 1.0, 1.0) - mn.Vector3.y_axis())
+    angle2 = mn.math.angle(forward_v.normalized(), -1 * mn.Vector3.z_axis())
+    axis2 = mn.Vector3.y_axis()
+    rotation2 = mn.Quaternion.rotation(angle2, axis2)
+
+    return rotation2 * rotation1
+
+
+class MotionConverterSMPLX:
+    """
+    Converts motions from the SMPL-X format into sequences of joint rotations
+    and root transforms that can drive a humanoid URDF object in Habitat.
+    """
+
+    def __init__(self, urdf_path):
+        self.index_joint_map = {
+            joint_name: index_joint
+            for index_joint, joint_name in enumerate(smplx_body_joint_names)
+        }
+        self.pc_id = p.connect(p.DIRECT)
+        self.human_bullet_id = p.loadURDF(urdf_path)
+
+        self.link_ids = list(range(p.getNumJoints(self.human_bullet_id)))
+        self.joint_info = [
+            p.getJointInfo(self.human_bullet_id, index)
+            for index in self.link_ids
+        ]
+        self.final_rotation_correction = global_correction_quat(
+            mn.Vector3.z_axis(), mn.Vector3.x_axis()
+        )
+
+    def convert_pose_to_rotation(
+        self, root_translation, root_orientation, pose_joints
+    ):
+        """
+        Converts a single pose from SMPL-X format to Habitat format. The input pose assumes that
+        the character faces +X and Z is up.
+        The output pose is converted so that the character faces -Z and +Y is up.
+        :param root_translation: The global pose translation, measured as the position of the pelvis
+        :param root_orientation: The global root orientation in axis-angle format
+        :param pose_joints: Array of num_joints * 3 elements where pose_joints[i*3:(i+1)*3] contains the rotation of the
+            ith joint, in axis-angle format.
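+            For the SMPL-X body used here this is 21 * 3 = 63 values: every joint in `smplx_body_joint_names` except the pelvis, whose rotation is given by `root_orientation`.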
+
+        :return: root_translation
+        :return: root_rotation
+        :return: new_pose
+        """
+
+        axis_angle_root_rotation_vec = mn.Vector3(root_orientation)
+        root_trans = mn.Vector3(root_translation)
+        axis_angle_root_rotation_angl = mn.Rad(
+            axis_angle_root_rotation_vec.length()
+        )
+        root_T = self.final_rotation_correction * mn.Quaternion.rotation(
+            axis_angle_root_rotation_angl,
+            axis_angle_root_rotation_vec.normalized(),
+        )
+        root_rotation = root_T.to_matrix()
+        root_translation = self.final_rotation_correction.transform_vector(
+            root_trans
+        )
+        new_pose = []
+        for model_link_id in range(len(self.joint_info)):
+            joint_type = self.joint_info[model_link_id][2]
+            joint_name = self.joint_info[model_link_id][1].decode("UTF-8")
+            if joint_name not in self.index_joint_map:
+                pose_joint_index = None
+            else:
+                # We subtract 1 because the pose array does not contain the root rotation
+                pose_joint_index = self.index_joint_map[joint_name] - 1
+
+            # When the target joint does not have any DoFs, we simply ignore it
+            if joint_type == p.JOINT_FIXED:
+                continue
+
+            if joint_type not in [p.JOINT_SPHERICAL]:
+                raise NotImplementedError(
+                    f"Error: {joint_type} is not a supported joint type"
+                )
+
+            if pose_joint_index is None:
+                Ql = [0, 0, 0, 1]
+            else:
+                pose_joint_indices = slice(
+                    pose_joint_index * 3, pose_joint_index * 3 + 3
+                )
+                # pose_joints[pose_joint_indices] holds this joint's local rotation in axis-angle form
+
+                if joint_type == p.JOINT_SPHERICAL:
+                    axis_angle_rotation = mn.Vector3(
+                        pose_joints[pose_joint_indices]
+                    )
+                    axis_angle_rotation_ang = mn.Rad(
+                        axis_angle_rotation.length()
+                    )
+                    Q = mn.Quaternion.rotation(
+                        axis_angle_rotation_ang,
+                        axis_angle_rotation.normalized(),
+                    )
+                    Ql = list(Q.vector) + [float(Q.scalar)]
+
+            new_pose += list(Ql)
+        return root_translation, root_rotation, new_pose
+
+    def convert_motion_file(self, motion_path, output_path="output_motion"):
+        """
+        Convert an npz file containing a SMPL-X motion into a file of rotations
+        that can be played in Habitat
+        :param motion_path: path to the npz file containing a SMPL-X motion.
+        :param output_path: output path where the pkl file with the converted motion will be saved
+        """
+        content_motion = np.load(motion_path, allow_pickle=True)
+
+        pose_info = {
+            "trans": content_motion["trans"],
+            "root_orient": content_motion["root_orient"],
+            "pose": content_motion["poses"][:, 3:66],
+        }
+        num_poses = content_motion["poses"].shape[0]
+        transform_array = []
+        joints_array = []
+        for index in range(num_poses):
+            root_trans, root_rot, pose_quat = self.convert_pose_to_rotation(
+                pose_info["trans"][index],
+                pose_info["root_orient"][index],
+                pose_info["pose"][index],
+            )
+            transform_as_mat = np.array(mn.Matrix4.from_(root_rot, root_trans))
+            transform_array.append(transform_as_mat[None, :])
+            joints_array.append(np.array(pose_quat)[None, :])
+
+        transform_array = np.concatenate(transform_array)
+        joints_array = np.concatenate(joints_array)
+
+        walk_motion = {
+            "joints_array": joints_array,
+            "transform_array": transform_array,
+            "displacement": None,
+            "fps": 1,  # placeholder; replace with the motion's real frame rate if known
+        }
+        content_motion = {
+            "pose_motion": walk_motion,
+        }
+        with open(f"{output_path}.pkl", "wb+") as ff:
+            pkl.dump(content_motion, ff)
+
+
+if __name__ == "__main__":
+    # Converts an npz motion file into a pkl file that can be processed in Habitat. Note that the
+    # resulting pkl file will have the same format as the ones used to move the character with the
+    # HumanoidRearrangeController.
+    # The npz file should contain:
+    # trans: N x 3, specifying the root translation on each of the N poses
+    # poses: N x ((J + 1) * 3), containing the root rotation as well as the rotation for each
+    # of the J = 21 body joints
+    motion_file = "data/humanoids/humanoid_data/walk_motion/CMU_10_04_stageii.npz"  # motion_to_convert (npz)
+    files = glob.glob(motion_file)
+    for in_path in files:
+        convert_helper = MotionConverterSMPLX(
+            urdf_path="data/humanoids/humanoid_data/female_2/female_2.urdf"
+        )
+        convert_helper.convert_motion_file(
+            motion_path=in_path,
+            output_path=in_path.replace(".npz", ""),
+        )
diff --git a/test/test_humanoid.py b/test/test_humanoid.py
index 38f4b2fb95..1dedd268ba 100644
--- a/test/test_humanoid.py
+++ b/test/test_humanoid.py
@@ -15,7 +15,10 @@
 import habitat.articulated_agents.humanoids.kinematic_humanoid as kinematic_humanoid
 import habitat_sim
 import habitat_sim.agent
-from habitat.articulated_agent_controllers import HumanoidRearrangeController
+from habitat.articulated_agent_controllers import (
+    HumanoidRearrangeController,
+    HumanoidSeqPoseController,
+)
 
 default_sim_settings = {
     # settings shared by example.py and benchmark.py
@@ -315,3 +318,129 @@
     hab_gym.step(hab_gym.action_space.sample())
     hab_gym.render("rgb_array")
     hab_gym.close()
+
+
+@pytest.mark.parametrize(
+    "humanoid_name",
+    ["female_2"],
+)
+def test_humanoid_seqpose_controller(humanoid_name):
+    """Test the humanoid sequential pose controller"""
+
+    # loading the physical scene
+    produce_debug_video = False
+    num_steps = 1000
+    cfg_settings = default_sim_settings.copy()
+    cfg_settings["scene"] = "NONE"
+    cfg_settings["enable_physics"] = True
+
+    observations = []
+    hab_cfg = make_cfg(cfg_settings)
+    with habitat_sim.Simulator(hab_cfg) as sim:
+        obj_template_mgr = sim.get_object_template_manager()
+        rigid_obj_mgr = sim.get_rigid_object_manager()
+
+        # setup the camera for debug video (looking at 0,0,0)
+        sim.agents[0].scene_node.translation = [0.0, 0.0, 2.0]
+
+        # add a ground plane
+        cube_handle = obj_template_mgr.get_template_handles("cubeSolid")[0]
+        cube_template_cpy = obj_template_mgr.get_template_by_handle(
+            cube_handle
+        )
+        cube_template_cpy.scale = np.array([5.0, 0.2, 5.0])
+        obj_template_mgr.register_template(cube_template_cpy)
+        ground_plane = rigid_obj_mgr.add_object_by_template_handle(cube_handle)
+        ground_plane.translation = [0.0, -0.2, 0.0]
+        ground_plane.motion_type = habitat_sim.physics.MotionType.STATIC
+
+        # compute a navmesh on the ground plane
+        navmesh_settings = habitat_sim.NavMeshSettings()
+        navmesh_settings.set_defaults()
+        navmesh_settings.include_static_objects = True
+        sim.recompute_navmesh(sim.pathfinder, navmesh_settings)
+        sim.navmesh_visualization = True
+
+        # add the humanoid to the world via the wrapper
+        humanoid_path = f"data/humanoids/humanoid_data/{humanoid_name}/{humanoid_name}.urdf"
+        walk_pose_path = f"data/humanoids/humanoid_data/{humanoid_name}/{humanoid_name}_motion_data_smplx.pkl"
+
+        agent_config = DictConfig(
+            {
+                "articulated_agent_urdf": humanoid_path,
+                "motion_data_path": walk_pose_path,
+            }
+        )
+        if not osp.exists(humanoid_path):
+            pytest.skip(f"No humanoid file {humanoid_path}")
+        kin_humanoid = kinematic_humanoid.KinematicHumanoid(agent_config, sim)
+        kin_humanoid.reconfigure()
+        kin_humanoid.update()
+        assert kin_humanoid.get_robot_sim_id() == 1  # 0 is the ground plane
+
+        # set base ground position from navmesh
+        # NOTE: because the navmesh floats above the collision geometry we should see a pop/settle with dynamics and no fixed base
+        target_base_pos = sim.pathfinder.snap_point(
+            kin_humanoid.sim_obj.translation
+        )
+        kin_humanoid.base_pos = target_base_pos
+        assert kin_humanoid.base_pos == target_base_pos
+        observations += simulate(sim, 0.01, produce_debug_video)
+
+        # Test controller
+        motion_path = (
+            "data/humanoids/humanoid_data/walk_motion/CMU_10_04_stageii.pkl"
+        )
+
+        if not osp.exists(motion_path):
+            pytest.skip(
+                f"No motion file {motion_path}. You can create this file by using humanoid_utils.py"
+            )
+        humanoid_controller = HumanoidSeqPoseController(motion_path)
+
+        base_trans = kin_humanoid.base_transformation
+        step_count = 0
+        humanoid_controller.reset(base_trans)
+        while step_count < num_steps:
+            humanoid_controller.calculate_pose()
+            humanoid_controller.next_pose(cycle=True)
+            new_pose = humanoid_controller.get_pose()
+
+            # get_pose() returns the joint quaternions followed by the 16
+            # values of the offset transform and the 16 values of the base
+            # transform, so the joints end 32 values before the end
+            new_joints = new_pose[:-32]
+            new_pos_transform_base = new_pose[-16:]
+            new_pos_transform_offset = new_pose[-32:-16]
+
+            # When the offset transform is all zeros, the controller is not
+            # setting the humanoid pose, so we skip the update
+            if np.array(new_pos_transform_offset).sum() != 0:
+                vecs_base = [
+                    mn.Vector4(new_pos_transform_base[i * 4 : (i + 1) * 4])
+                    for i in range(4)
+                ]
+                vecs_offset = [
+                    mn.Vector4(new_pos_transform_offset[i * 4 : (i + 1) * 4])
+                    for i in range(4)
+                ]
+                new_transform_offset = mn.Matrix4(*vecs_offset)
+                new_transform_base = mn.Matrix4(*vecs_base)
+                kin_humanoid.set_joint_transform(
+                    new_joints, new_transform_offset, new_transform_base
+                )
+                observations += simulate(sim, 0.0001, produce_debug_video)
+
+            step_count += 1
+
+        # produce some test debug video
+        if produce_debug_video:
+            from habitat_sim.utils import viz_utils as vut
+
+            vut.make_video(
+                observations,
+                "color_sensor",
+                "color",
+                "test_humanoid_wrapper",
+                open_vid=True,
+            )