From 21fbd5a3238c656d77cf1fae463ea170576b2d15 Mon Sep 17 00:00:00 2001 From: hofee Date: Fri, 5 Sep 2025 15:49:00 +0800 Subject: [PATCH] solve dependencies problem --- .pbignore | 0 configs/generate_data_config.yaml | 7 + configs/generate_task_config.yaml | 1 + data_gen_dependencies/__init__.py | 35 + data_gen_dependencies/action/__init__.py | 30 + data_gen_dependencies/action/base.py | 110 ++ data_gen_dependencies/action/grasp.py | 136 +++ data_gen_dependencies/action/insert.py | 53 + data_gen_dependencies/action/place.py | 64 ++ data_gen_dependencies/action/pour.py | 97 ++ data_gen_dependencies/action/pull.py | 59 + data_gen_dependencies/action/push.py | 95 ++ data_gen_dependencies/action/slide.py | 47 + .../aimdk/protocol/CMakeLists.txt | 8 + .../aimdk/protocol/README.md | 25 + .../aimdk/protocol/common/CMakeLists.txt | 5 + .../aimdk/protocol/common/README.md | 9 + .../aimdk/protocol/common/battery_state.proto | 95 ++ .../aimdk/protocol/common/box.proto | 13 + .../aimdk/protocol/common/common.proto | 20 + .../aimdk/protocol/common/header.proto | 71 ++ .../aimdk/protocol/common/header_pb2.py | 35 + .../aimdk/protocol/common/image.proto | 22 + .../aimdk/protocol/common/joint.proto | 100 ++ .../aimdk/protocol/common/joint_pb2.py | 30 + .../aimdk/protocol/common/odometry.proto | 17 + .../aimdk/protocol/common/pixel_pose.proto | 31 + .../aimdk/protocol/common/qr_pose.proto | 13 + .../aimdk/protocol/common/quaternion.proto | 13 + .../aimdk/protocol/common/quaternion_pb2.py | 26 + .../aimdk/protocol/common/rpc.proto | 54 + .../aimdk/protocol/common/rpc_pb2.py | 38 + .../aimdk/protocol/common/rpy.proto | 13 + .../aimdk/protocol/common/rpy_pb2.py | 26 + .../aimdk/protocol/common/rpy_pb2_grpc.py | 4 + .../protocol/common/se2_acceleration.proto | 13 + .../aimdk/protocol/common/se2_pose.proto | 13 + .../aimdk/protocol/common/se2_velocity.proto | 13 + .../protocol/common/se3_acceleration.proto | 14 + .../aimdk/protocol/common/se3_pose.proto | 23 + 
.../aimdk/protocol/common/se3_pose_pb2.py | 34 + .../protocol/common/se3_trajectory.proto | 25 + .../aimdk/protocol/common/se3_velocity.proto | 13 + .../aimdk/protocol/common/timestamp.proto | 48 + .../aimdk/protocol/common/timestamp_pb2.py | 26 + .../aimdk/protocol/common/twist.proto | 22 + .../aimdk/protocol/common/vec2.proto | 11 + .../aimdk/protocol/common/vec3.proto | 12 + .../aimdk/protocol/common/vec3_pb2.py | 26 + .../aimdk/protocol/common/wrench.proto | 22 + .../aimdk/protocol/hal/CMakeLists.txt | 5 + .../aimdk/protocol/hal/hand/agi_claw.proto | 20 + .../aimdk/protocol/hal/hand/agi_claw_pb2.py | 28 + .../protocol/hal/hand/hal_hand_service.proto | 25 + .../protocol/hal/hand/hal_hand_service_pb2.py | 60 + .../hal/hand/hal_hand_service_pb2_grpc.py | 103 ++ .../aimdk/protocol/hal/hand/hand.proto | 35 + .../protocol/hal/hand/hand_channel.proto | 18 + .../aimdk/protocol/hal/hand/hand_pb2.py | 40 + .../aimdk/protocol/hal/hand/inspire.proto | 25 + .../aimdk/protocol/hal/hand/inspire_pb2.py | 30 + .../aimdk/protocol/hal/hand/jodell.proto | 10 + .../aimdk/protocol/hal/hand/jodell_pb2.py | 26 + .../aimdk/protocol/hal/jaka/jaka.proto | 733 ++++++++++++ .../aimdk/protocol/hal/jaka/jaka_pb2.py | 174 +++ .../aimdk/protocol/hal/jaka/jaka_pb2_grpc.py | 1001 +++++++++++++++++ .../protocol/hal/joint/joint_channel.proto | 69 ++ .../protocol/hal/joint/joint_channel_pb2.py | 66 ++ .../hal/joint/joint_channel_pb2_grpc.py | 165 +++ .../aimdk/protocol/hal/joint/msg/Command.msg | 7 + .../protocol/hal/joint/msg/JointCommand.msg | 3 + .../protocol/hal/joint/msg/JointState.msg | 3 + .../aimdk/protocol/hal/joint/msg/State.msg | 5 + .../protocol/hal/pose/msg/PoseCommand.msg | 6 + .../protocol/hal/pose/msg/PoseCommands.msg | 4 + .../aimdk/protocol/hal/pose/msg/PoseState.msg | 6 + .../protocol/hal/pose/msg/PoseStates.msg | 5 + .../protocol/hal/pose/msg/TorqSensorState.msg | 3 + .../hal/sensors/msg/FootSensorState.msg | 4 + .../protocol/hal/sensors/msg/LivoxPoint.msg | 9 + 
.../hal/sensors/msg/LivoxPointCloud.msg | 8 + .../protocol/hal/sensors/msg/SensorState.msg | 1 + .../protocol/hal/sensors/rs2_camera.proto | 98 ++ .../protocol/hal/sensors/rs2_camera_pb2.py | 42 + .../hal/sensors/rs2_camera_pb2_grpc.py | 113 ++ .../protocol/sim/sim_camera_service.proto | 25 + .../protocol/sim/sim_camera_service_pb2.py | 34 + .../sim/sim_camera_service_pb2_grpc.py | 66 ++ .../protocol/sim/sim_gripper_service.proto | 24 + .../protocol/sim/sim_gripper_service_pb2.py | 48 + .../sim/sim_gripper_service_pb2_grpc.py | 99 ++ .../protocol/sim/sim_joint_service.proto | 0 .../protocol/sim/sim_object_service.proto | 57 + .../protocol/sim/sim_object_service_pb2.py | 60 + .../sim/sim_object_service_pb2_grpc.py | 165 +++ .../sim/sim_observation_service.proto | 287 +++++ .../sim/sim_observation_service_pb2.py | 142 +++ .../sim/sim_observation_service_pb2_grpc.py | 594 ++++++++++ data_gen_dependencies/base_agent.py | 307 +++++ data_gen_dependencies/client.py | 962 ++++++++++++++++ data_gen_dependencies/data_generate.py | 33 +- data_gen_dependencies/data_utils.py | 30 + data_gen_dependencies/fix_rotation.py | 306 +++++ .../gripper_width_120s_fixed.json | 502 +++++++++ data_gen_dependencies/manip_solver.py | 657 +++++++++++ data_gen_dependencies/object.py | 271 +++++ data_gen_dependencies/omni_robot.py | 540 +++++++++ data_gen_dependencies/omniagent.py | 459 ++++++++ data_gen_dependencies/robot.py | 290 +++++ data_gen_dependencies/transforms.py | 149 +++ data_gen_dependencies/utils.py | 558 +++++++++ runners/data_generator.py | 1 + runners/task_generator.py | 16 +- task_gen_dependencies/task_generate.py | 7 +- 114 files changed, 11337 insertions(+), 19 deletions(-) create mode 100644 .pbignore create mode 100644 configs/generate_data_config.yaml create mode 100644 data_gen_dependencies/action/__init__.py create mode 100644 data_gen_dependencies/action/base.py create mode 100644 data_gen_dependencies/action/grasp.py create mode 100644 
data_gen_dependencies/action/insert.py create mode 100644 data_gen_dependencies/action/place.py create mode 100644 data_gen_dependencies/action/pour.py create mode 100644 data_gen_dependencies/action/pull.py create mode 100644 data_gen_dependencies/action/push.py create mode 100644 data_gen_dependencies/action/slide.py create mode 100644 data_gen_dependencies/aimdk/protocol/CMakeLists.txt create mode 100644 data_gen_dependencies/aimdk/protocol/README.md create mode 100644 data_gen_dependencies/aimdk/protocol/common/CMakeLists.txt create mode 100644 data_gen_dependencies/aimdk/protocol/common/README.md create mode 100644 data_gen_dependencies/aimdk/protocol/common/battery_state.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/box.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/common.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/header.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/header_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/image.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/joint.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/joint_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/odometry.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/pixel_pose.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/qr_pose.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/quaternion.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/quaternion_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/rpc.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/rpc_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/rpy.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/rpy_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/rpy_pb2_grpc.py create 
mode 100644 data_gen_dependencies/aimdk/protocol/common/se2_acceleration.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/se2_pose.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/se2_velocity.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/se3_acceleration.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/se3_pose.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/se3_pose_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/se3_trajectory.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/se3_velocity.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/timestamp.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/timestamp_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/twist.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/vec2.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/vec3.proto create mode 100644 data_gen_dependencies/aimdk/protocol/common/vec3_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/common/wrench.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/CMakeLists.txt create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/hand.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/hand_channel.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/hand_pb2.py create mode 100644 
data_gen_dependencies/aimdk/protocol/hal/hand/inspire.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/inspire_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/jodell.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/hand/jodell_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/jaka/jaka.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/msg/Command.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointCommand.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointState.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/joint/msg/State.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommand.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommands.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseState.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseStates.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/pose/msg/TorqSensorState.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/sensors/msg/FootSensorState.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPoint.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPointCloud.msg create mode 100644 data_gen_dependencies/aimdk/protocol/hal/sensors/msg/SensorState.msg create mode 100644 
data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera.proto create mode 100644 data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_camera_service.proto create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service.proto create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_joint_service.proto create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_object_service.proto create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2_grpc.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_observation_service.proto create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2.py create mode 100644 data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2_grpc.py create mode 100644 data_gen_dependencies/base_agent.py create mode 100644 data_gen_dependencies/client.py create mode 100644 data_gen_dependencies/data_utils.py create mode 100644 data_gen_dependencies/fix_rotation.py create mode 100644 data_gen_dependencies/gripper_width_120s_fixed.json create mode 100644 data_gen_dependencies/manip_solver.py create mode 100644 data_gen_dependencies/object.py create mode 100644 data_gen_dependencies/omni_robot.py create mode 100644 data_gen_dependencies/omniagent.py create mode 100644 data_gen_dependencies/robot.py create mode 100644 
data_gen_dependencies/transforms.py create mode 100644 data_gen_dependencies/utils.py diff --git a/.pbignore b/.pbignore new file mode 100644 index 0000000..e69de29 diff --git a/configs/generate_data_config.yaml b/configs/generate_data_config.yaml new file mode 100644 index 0000000..bea32e6 --- /dev/null +++ b/configs/generate_data_config.yaml @@ -0,0 +1,7 @@ +runner: + workspace: + name: "generate_data" + root_dir: "workspace" + generate: + input_data_root: "/home/ubuntu/Projects/docker_isaac_sim/input/data" + input_target_task_templates_path: "workspace/divide_task/template_targets/task_template_target_0.json" diff --git a/configs/generate_task_config.yaml b/configs/generate_task_config.yaml index b0910ba..cf05eaf 100644 --- a/configs/generate_task_config.yaml +++ b/configs/generate_task_config.yaml @@ -6,3 +6,4 @@ runner: input_data_root: "/home/ubuntu/Projects/docker_isaac_sim/input/data" input_target_task_templates_path: "workspace/divide_task/template_targets/task_template_target_0.json" output_task_root_dir: + output_generate_result_path: \ No newline at end of file diff --git a/data_gen_dependencies/__init__.py b/data_gen_dependencies/__init__.py index e69de29..e0b52bd 100644 --- a/data_gen_dependencies/__init__.py +++ b/data_gen_dependencies/__init__.py @@ -0,0 +1,35 @@ +from abc import abstractmethod, ABC + + +class Robot(ABC): + @abstractmethod + def get_prim_world_pose(self, prim_path): + pass + + @abstractmethod + def reset(self): + pass + + @abstractmethod + def open_gripper(self, id="left", width=0.1): + pass + + @abstractmethod + def close_gripper(self, id="left", force=50): + pass + + @abstractmethod + def move(self, target, type="pose", speed=None): + pass + + @abstractmethod + def decode_gripper_pose(self, gripper_pose): + pass + + @abstractmethod + def get_ee_pose(self, id="right"): + pass + + @abstractmethod + def check_ik(self, pose, id="right"): + pass diff --git a/data_gen_dependencies/action/__init__.py 
b/data_gen_dependencies/action/__init__.py new file mode 100644 index 0000000..74f1133 --- /dev/null +++ b/data_gen_dependencies/action/__init__.py @@ -0,0 +1,30 @@ +from .grasp import GraspStage, PickStage, HookStage +from .place import PlaceStage +from .insert import InsertStage, HitStage +from .slide import SlideStage +from .pour import PourStage +from .pull import PullStage +from .push import PushStage, ClickStage + +ACTION_STAGE = { + "grasp": GraspStage, + "pick": PickStage, + "hook": HookStage, + "place": PlaceStage, + "insert": InsertStage, + "slide": SlideStage, + "shave": NotImplemented, + "brush": NotImplemented, + "wipe": NotImplemented, + "hit": NotImplemented, + "pour": PourStage, + "push": PushStage, + 'click': ClickStage, + 'touch': ClickStage, + "pull": PullStage +} + +def build_stage(action): + if action not in ACTION_STAGE: + raise NotImplementedError + return ACTION_STAGE[action] diff --git a/data_gen_dependencies/action/base.py b/data_gen_dependencies/action/base.py new file mode 100644 index 0000000..2e8612e --- /dev/null +++ b/data_gen_dependencies/action/base.py @@ -0,0 +1,110 @@ +import copy +from collections import deque + +import numpy as np +from data_gen_dependencies.data_utils import pose_difference + + +def simple_check_completion(goal, objects, last_statement=None, pos_threshold=0.06, angle_threshold=70, is_grasped=False): + active_obj_id, passive_obj_id, target_pose_canonical, gripper_action, transform_world, motion_type = goal + if target_pose_canonical is None: + return True + if gripper_action=='open': + return True + + current_pose_world = objects[active_obj_id].obj_pose + if len(target_pose_canonical.shape)==3: + target_pose_canonical = target_pose_canonical[-1] + transform_world = transform_world[-1] + target_pose_world = objects[passive_obj_id].obj_pose @ target_pose_canonical + if not is_grasped: + target_pose_world = np.dot(transform_world, target_pose_world) + + pos_diff, angle_diff = pose_difference(current_pose_world, 
target_pose_world) + success = (pos_diff < pos_threshold) and (angle_diff < angle_threshold) + return success + + +def solve_target_gripper_pose(stage, objects): + active_obj_ID, passive_obj_ID, target_pose_canonical, gripper_action, transform_world, motion_type = stage + + anchor_pose = objects[passive_obj_ID].obj_pose + + + if motion_type=='Trajectory': + assert len(target_pose_canonical.shape)==3, 'The target_pose should be a list of poses' + target_pose = anchor_pose[np.newaxis, ...] @ target_pose_canonical + target_pose = transform_world @ target_pose + else: + target_pose = anchor_pose @ target_pose_canonical + target_pose = transform_world @ target_pose + assert 'gripper' in objects, 'The gripper should be the first one in the object list' + current_gripper_pose = objects['gripper'].obj_pose + + if active_obj_ID=='gripper': + target_gripper_pose = target_pose + else: + current_obj_pose = objects[active_obj_ID].obj_pose + gripper2obj = np.linalg.inv(current_obj_pose) @ current_gripper_pose + if len(target_pose.shape)==3: + gripper2obj = gripper2obj[np.newaxis, ...] 
+ + target_obj_pose = target_pose + target_gripper_pose = target_obj_pose @ gripper2obj + + return target_gripper_pose + + + +class StageTemplate: + def __init__(self, active_obj_id, passive_obj_id, active_element, passive_element): + self.active_obj_id = active_obj_id + self.passive_obj_id = passive_obj_id + self.active_element = active_element + self.passive_element = passive_element + + self.last_statement = None + self.sub_stages = deque() + self.step_id = 0 + self.extra_params = {} + + def generate_substage(self): + raise NotImplementedError + + def __len__(self) -> int: + return len(self.sub_stages) - self.step_id + + def get_action(self, objects): + if self.__len__()==0: + return None + gripper_pose_canonical, gripper_action, transform_world, motion_type = self.sub_stages[self.step_id] + + if motion_type == 'local_gripper': + delta_pose = gripper_pose_canonical + gripper_pose = objects['gripper'].obj_pose + target_gripper_pose = gripper_pose @ delta_pose + motion_type = 'Straight' + else: + + if gripper_pose_canonical is None: + target_gripper_pose = None + else: + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + target_gripper_pose = solve_target_gripper_pose(goal_datapack, objects) + + last_statement = {'objects': copy.deepcopy(objects), 'target_gripper_pose': target_gripper_pose} + self.last_statement = last_statement + return target_gripper_pose, motion_type, gripper_action, self.extra_params.get('arm', 'right') + + + def check_completion(self, objects): + if self.__len__()==0: + return True + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + succ = simple_check_completion(goal_datapack, objects) + if succ: + self.step_id += 1 + return succ + + + diff --git a/data_gen_dependencies/action/grasp.py b/data_gen_dependencies/action/grasp.py new file mode 100644 index 0000000..74607e6 --- /dev/null +++ b/data_gen_dependencies/action/grasp.py @@ -0,0 +1,136 @@ +import numpy as 
np + +from data_gen_dependencies.action.base import StageTemplate, simple_check_completion, pose_difference + + +class PickStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element, passive_element, grasp_pose, extra_params=None, **kwargs): + super().__init__(active_obj_id, passive_obj_id, active_element, passive_element) + self.grasp_pose_canonical = grasp_pose + self.use_pre_grasp = False + + self.extra_params = {} if extra_params is None else extra_params + self.pick_up_step = 999 + self.generate_substage(grasp_pose) + + + def generate_substage(self, grasp_pose): + pick_up_distance = self.extra_params.get('pick_up_distance', 0.12) + pick_up_type = self.extra_params.get('pick_up_type', 'Simple') + pick_up_direction = self.extra_params.get('pick_up_direction', 'z') + num_against = self.extra_params.get('against', 0) + if self.use_pre_grasp: + pre_grasp_distance = self.extra_params.get('pre_grasp_distance', 0.08) + # sub-stage-0 moveTo pregrasp pose + pre_pose = np.array([ + [1., 0, 0, 0], + [0, 1., 0, 0], + [0, 0, 1., -pre_grasp_distance], + [0, 0, 0, 1]]) + pre_grasp_pose = grasp_pose @ pre_pose + self.sub_stages.append([pre_grasp_pose, None, np.eye(4), 'AvoidObs']) + # sub-stage-1 moveTo grasp pose + self.sub_stages.append([grasp_pose, 'close', np.eye(4), 'Simple']) + self.pick_up_step = 2 + elif num_against > 0: + for index in range(num_against): + self.sub_stages.append([grasp_pose, 'open', np.eye(4), 'AvoidObs']) + self.sub_stages.append([grasp_pose, 'close', np.eye(4), 'AvoidObs']) + self.pick_up_step = num_against + 1 + gripper_action = None + motion_type = pick_up_type + transform_up = np.eye(4) + if pick_up_direction == "x": + transform_up[0,3] = pick_up_distance + transform_up[2,3] = 0.02 + elif pick_up_direction == "y": + transform_up[1,3] = pick_up_distance + else: + transform_up[2,3] = pick_up_distance + self.sub_stages.append([grasp_pose, gripper_action, transform_up, motion_type]) + else: + # sub-stage-0 moveTo grasp 
pose + self.sub_stages.append([grasp_pose, 'close', np.eye(4), 'AvoidObs']) + self.pick_up_step = 1 + + # pick-up + gripper_action = None + motion_type = pick_up_type + transform_up = np.eye(4) + if pick_up_direction == "x": + transform_up[0,3] = pick_up_distance + transform_up[2,3] = 0.02 + elif pick_up_direction == "y": + transform_up[1,3] = pick_up_distance + else: + transform_up[2,3] = pick_up_distance + self.sub_stages.append([grasp_pose, gripper_action, transform_up, motion_type]) + + + + def check_completion(self, objects): + if self.__len__()==0: + return True + + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + + succ = True + if self.step_id < self.pick_up_step: + succ = simple_check_completion(goal_datapack, objects, is_grasped=True) + + if succ: + self.step_id += 1 + return succ + + + + + + + + +class GraspStage(PickStage): + def generate_substage(self, grasp_pose): + motion_type = self.extra_params.get('move_type', 'AvoidObs') + + if self.extra_params.get('use_pre_grasp', False): + pre_pose = np.array([ + [1., 0, 0, 0], + [0, 1., 0, 0], + [0, 0, 1., -0.08], + [0, 0, 0, 1]]) + pre_grasp_pose = grasp_pose @ pre_pose + gripper_action = None + + self.sub_stages.append([pre_grasp_pose, None, np.eye(4), motion_type]) + # sub-stage-1 moveTo grasp pose + self.sub_stages.append([grasp_pose, 'close', np.eye(4), motion_type]) + + + + + + def check_completion(self, objects): + if self.__len__()==0: + return True + + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + + pos_threshold = self.extra_params.get('position_threshold', 0.06) + succ = simple_check_completion(goal_datapack, objects, pos_threshold=pos_threshold, is_grasped=True) + + if self.step_id >= 2: + gripper_pose = objects['gripper'].obj_pose + grasp_pose = objects[self.passive_obj_id].obj_pose @ self.grasp_pose_canonical + pos_diff, _ = pose_difference(gripper_pose, grasp_pose) + grasp_succ = pos_diff < pos_threshold + 
succ = succ and grasp_succ + + if succ: + self.step_id += 1 + return succ + + +class HookStage(PickStage): + def generate_substage(self, grasp_pose, *args, **kwargs): + self.sub_stages.append([grasp_pose, None, np.eye(4), 'AvoidObs']) diff --git a/data_gen_dependencies/action/insert.py b/data_gen_dependencies/action/insert.py new file mode 100644 index 0000000..0a8a78b --- /dev/null +++ b/data_gen_dependencies/action/insert.py @@ -0,0 +1,53 @@ +import numpy as np + +from data_gen_dependencies.action.base import StageTemplate, simple_check_completion + + +class InsertStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=None, vector_direction=None, extra_params={}, **kwargs): + super().__init__(active_obj_id, passive_obj_id, active_element, passive_element) + + + self.move_distance = 0.01 + self.generate_substage(target_pose, vector_direction, extra_params) + + + def generate_substage(self, target_pose, vector_direction, extra_params={}): + insert_pre_distance = extra_params.get('insert_pre_distance', -0.08) + insert_motion_type = extra_params.get('insert_motion_type', 'Simple') + + vector_direction = vector_direction / np.linalg.norm(vector_direction) + + # moveTo pre-place position + pre_pose = target_pose.copy() + pre_pose[:3,3] += vector_direction * insert_pre_distance + self.sub_stages.append([pre_pose, None, np.eye(4), 'AvoidObs']) + + # insert + move_pose = target_pose.copy() + move_pose[:3,3] += vector_direction * self.move_distance + self.sub_stages.append([move_pose, None, np.eye(4), insert_motion_type]) + + # open gripper + self.sub_stages.append([None, 'open', np.eye(4), None]) + + def check_completion(self, objects): + if self.__len__()==0: + return True + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + succ = simple_check_completion(goal_datapack, objects) + if succ: + self.step_id += 1 + return succ + + + +class HitStage(InsertStage): 
+ def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=np.eye(4), vector_direction=None, **kwargs): + super(InsertStage, self).__init__(active_obj_id, passive_obj_id, active_element, passive_element) + + self.pre_distance = -0.05 + self.move_distance = 0.005 + self.generate_substage(target_pose, vector_direction) + + diff --git a/data_gen_dependencies/action/place.py b/data_gen_dependencies/action/place.py new file mode 100644 index 0000000..52b8c41 --- /dev/null +++ b/data_gen_dependencies/action/place.py @@ -0,0 +1,64 @@ +import numpy as np + +from data_gen_dependencies.action.base import StageTemplate, simple_check_completion + + +class PlaceStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=None, extra_params={}, **kwargs): + super().__init__(active_obj_id, passive_obj_id, active_element, passive_element) + self.pre_transform_up =0.12 + self.place_transform_up = np.array([0, 0, 0.01]) + self.extra_params = {} if extra_params is None else extra_params + self.use_pre_place = self.extra_params.get('use_pre_place', True) + self.generate_substage(target_pose) + + + + + def generate_substage(self, target_pose): + target_pose_canonical = target_pose + gripper_cmd = self.extra_params.get('gripper_state', 'open') + pre_place_direction = self.extra_params.get('pre_place_direction', 'z') + print(gripper_cmd) + num_against = self.extra_params.get('against', 0) + if self.use_pre_place: + # moveTo pre-place position + transform_up = np.eye(4) + print(transform_up) + if pre_place_direction == "x": + transform_up[0,3] = self.pre_transform_up + transform_up[2,3] = 0.02 + elif pre_place_direction == "y": + transform_up[1,3] = self.pre_transform_up + else: + transform_up[2,3] = self.pre_transform_up + self.sub_stages.append([target_pose_canonical, None, transform_up, 'AvoidObs']) + + # place + palce_transform_up = np.eye(4) + palce_transform_up[:3,3] = 
self.place_transform_up + if num_against > 0: + for index in range(num_against): + self.sub_stages.append([target_pose_canonical, None, transform_up, 'AvoidObs']) + self.pick_up_step = num_against + 1 + self.sub_stages.append([target_pose_canonical, gripper_cmd, palce_transform_up, 'Simple']) + else: + palce_transform_up = np.eye(4) + palce_transform_up[:3,3] = self.place_transform_up + self.sub_stages.append([target_pose_canonical, None, palce_transform_up, 'AvoidObs']) + + self.sub_stages.append([None, gripper_cmd, np.eye(4), 'Simple']) + + def check_completion(self, objects): + if self.__len__()==0: + return True + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + succ = True + if self.step_id==0: + succ = simple_check_completion(goal_datapack, objects) + if succ: + self.step_id += 1 + return succ + + + diff --git a/data_gen_dependencies/action/pour.py b/data_gen_dependencies/action/pour.py new file mode 100644 index 0000000..79e6592 --- /dev/null +++ b/data_gen_dependencies/action/pour.py @@ -0,0 +1,97 @@ +from data_gen_dependencies.action.base import StageTemplate, solve_target_gripper_pose, simple_check_completion +from data_gen_dependencies.fix_rotation import interpolate_rotation_matrices + +import copy +import numpy as np + + + + + +class PourStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=None, current_pose=None, obj2part=None, **kwargs): + super().__init__(active_obj_id, passive_obj_id, active_element, passive_element) + + self.pre_pour_height = 0.25 + self.pour_heigth = 0.08 + self.generate_substage(target_pose, current_pose, obj2part) + + + def generate_substage(self, target_pose, current_pose, obj2part): + target_part_pose = target_pose @ np.linalg.inv(obj2part) + current_part_pose = current_pose @ np.linalg.inv(obj2part) + + gripper_action = None + transform_pre_pour = np.eye(4) + transform_pre_pour[2,3] = self.pre_pour_height # 
8cm above the target pose + + + # moveTo pre-pour position + pre_pour_part_pose = np.copy(target_part_pose) + pre_pour_part_pose[:3, :3] = current_part_pose[:3, :3] + # # pre_pour at 2/3 position from current to target + # pos_diff = target_part_pose[:3, 3] - current_part_pose[:3, 3] + # pre_pour_part_pose[:3, 3] = current_part_pose[:3, 3] + pos_diff * 1/2 + # pre_pour_part_pose[:3, :3] = current_part_pose[:3, :3] + pre_pour_pose = pre_pour_part_pose @ obj2part + motion_type = 'AvoidObs' + self.sub_stages.append([pre_pour_pose, gripper_action, transform_pre_pour, motion_type]) + + + motion_type = 'Trajectory' + rotations = interpolate_rotation_matrices(current_part_pose[:3,:3], target_part_pose[:3,:3], 200) + target_part_pose_list = np.tile(target_part_pose, (len(rotations), 1, 1)) + target_part_pose_list[:, :3, :3] = rotations + target_pose_list = target_part_pose_list @ obj2part[np.newaxis, ...] + + transform_pour = np.tile(np.eye(4), (len(rotations), 1, 1)) + # transform_pour[:, 2, 3] = self.pre_pour_height + transform_pour[:, 2, 3] = np.linspace(self.pre_pour_height, self.pour_heigth, len(rotations)) + # import ipdb; ipdb.set_trace() + self.sub_stages.append([target_pose_list, gripper_action, transform_pour, motion_type]) + + + def get_action(self, objects): + if self.__len__()==0: + return None + gripper_pose_canonical, gripper_action, transform_world, motion_type = self.sub_stages[self.step_id] + + if gripper_pose_canonical is None: + target_gripper_pose = None + else: + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + target_gripper_pose = solve_target_gripper_pose(goal_datapack, objects) + + # current_gripper_pose = objects['gripper'].obj_pose + # if self.step_id==0: + # # pre_pour at 5cm away from current to target + # diff_xy_direction = target_gripper_pose[:2, 3] - current_gripper_pose[:2, 3] + # pos_diff = np.linalg.norm(diff_xy_direction) * 0.10 + # target_gripper_pose[:2, 3] = current_gripper_pose[:2, 3] + 
pos_diff + + # elif self.step_id==1: + # target_xyz = target_gripper_pose[-1, :3, 3] + # current_xyz = current_gripper_pose[:3, 3] + + # xyz_interp = np.linspace(current_xyz, target_xyz, len(target_xyz)) + # target_gripper_pose[:, :3, 3] = xyz_interp + + # import ipdb; ipdb.set_trace() + + last_statement = {'objects': copy.deepcopy(objects), 'target_gripper_pose': target_gripper_pose} + self.last_statement = last_statement + return target_gripper_pose, motion_type, gripper_action , "right" + + + def check_completion(self, objects): + if self.__len__()==0: + return True + goal_datapack = [self.active_obj_id, self.passive_obj_id] + self.sub_stages[self.step_id] + succ = simple_check_completion(goal_datapack, objects) + + + + + if succ: + self.step_id += 1 + return succ \ No newline at end of file diff --git a/data_gen_dependencies/action/pull.py b/data_gen_dependencies/action/pull.py new file mode 100644 index 0000000..8ada4e6 --- /dev/null +++ b/data_gen_dependencies/action/pull.py @@ -0,0 +1,59 @@ +import numpy as np + +from data_gen_dependencies.action.base import StageTemplate + + +class PullStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=np.eye(4), extra_params=None, **kwargs): + super().__init__(active_obj_id, passive_obj_id, active_element, passive_element) + self.pull_distance = passive_element['distance'] + assert self.pull_distance > 0 + self.joint_position_threshold = passive_element.get('joint_position_threshold', 0.5) + assert self.joint_position_threshold >= 0 and self.joint_position_threshold <= 1 + self.joint_direction = passive_element.get('joint_direction', 1) + assert self.joint_direction in [-1, 1] + self.joint_velocity_threshold = passive_element.get('joint_velocity_threshold', 999) + vector_direction = passive_element['direction'] + + self.extra_params = {} if extra_params is None else extra_params + if 'pull_distance' in self.extra_params: + self.pull_distance = 
self.extra_params['pull_distance'] + self.generate_substage(target_pose, vector_direction) + + def generate_substage(self, target_pose, vector_direction): + vector_direction = vector_direction / np.linalg.norm(vector_direction) + # moveTo pre-place position + move_pose = target_pose.copy() + move_pose[:3,3] += vector_direction * self.pull_distance + self.sub_stages.append([move_pose, None, np.eye(4), 'Straight']) + self.sub_stages.append([None, "open", np.eye(4), 'Simple']) + + free_delta_pose = np.eye(4) + free_delta_pose[2,3] = -0.03 + self.sub_stages.append([free_delta_pose, None, np.eye(4), 'local_gripper']) + + + + + def check_completion(self, objects): + if self.__len__()==0: + return True + + if self.step_id >= 0: + joint_position = objects[self.passive_obj_id].joint_position + joint_velocity = objects[self.passive_obj_id].joint_velocity + lower_bound = objects[self.passive_obj_id].part_joint_limit['lower_bound'] + upper_bound = objects[self.passive_obj_id].part_joint_limit['upper_bound'] + if self.joint_direction == 1: + succ = joint_position > lower_bound + (upper_bound - lower_bound) * self.joint_position_threshold + else: + succ = joint_position < lower_bound + (upper_bound - lower_bound) * self.joint_position_threshold + succ = succ and joint_velocity < self.joint_velocity_threshold + + if succ: + self.step_id += 1 + # else: + # import ipdb;ipdb.set_trace() + + return succ + \ No newline at end of file diff --git a/data_gen_dependencies/action/push.py b/data_gen_dependencies/action/push.py new file mode 100644 index 0000000..610795b --- /dev/null +++ b/data_gen_dependencies/action/push.py @@ -0,0 +1,95 @@ +import numpy as np + +from data_gen_dependencies.action.base import StageTemplate, pose_difference + + +class PushStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=np.eye(4), vector_direction=None, extra_params=None, **kwargs): + super().__init__(active_obj_id, 
passive_obj_id, active_element, passive_element) + + extra_params = {} if extra_params is None else extra_params + + self.pre_distance = extra_params.get('pre_distance', -0.03) + self.move_distance = extra_params.get('move_distance', 0.14) + + vector_direction = vector_direction / np.linalg.norm(vector_direction) + self.goal_transform = np.eye(4) + self.goal_transform[:3,3] = vector_direction * self.move_distance + + if passive_element is not None: + self.joint_position_threshold = passive_element.get('joint_position_threshold', 0.5) + assert self.joint_position_threshold >= 0 and self.joint_position_threshold <= 1 + self.joint_direction = passive_element.get('joint_direction', 1) + assert self.joint_direction in [-1, 1] + self.joint_velocity_threshold = passive_element.get('joint_velocity_threshold', 999) + + self.generate_substage(target_pose, vector_direction) + + + def generate_substage(self, target_pose, vector_direction): + vector_direction = vector_direction / np.linalg.norm(vector_direction) + # moveTo pre-place position + pre_pose = target_pose.copy() + pre_pose[:3,3] += vector_direction * self.pre_distance + self.sub_stages.append([pre_pose, 'close', np.eye(4), 'AvoidObs']) + + # insert + move_pose = target_pose.copy() + move_pose[:3,3] += vector_direction * self.move_distance + self.sub_stages.append([move_pose, None, np.eye(4), 'Straight']) + + + def check_completion(self, objects): + if self.__len__()==0: + return True + + # TODO 铰接joitn判定 + + if self.step_id==0: + succ = True + + if self.step_id>=1: + if objects[self.passive_obj_id].is_articulated: + joint_position = objects[self.passive_obj_id].joint_position + joint_velocity = objects[self.passive_obj_id].joint_velocity + lower_bound = objects[self.passive_obj_id].part_joint_limit['lower_bound'] + upper_bound = objects[self.passive_obj_id].part_joint_limit['upper_bound'] + if self.joint_direction == 1: + succ = joint_position > lower_bound + (upper_bound - lower_bound) * 
self.joint_position_threshold + else: + succ = joint_position < lower_bound + (upper_bound - lower_bound) * self.joint_position_threshold + succ = succ and joint_velocity < self.joint_velocity_threshold + else: + sub_stage = self.sub_stages[self.step_id] + + last_pose = self.last_statement['objects'][self.passive_obj_id].obj_pose + target_obj_pose = last_pose @ self.goal_transform + current_obj_pose = objects[self.passive_obj_id].obj_pose + pos_diff, _ = pose_difference(current_obj_pose, target_obj_pose) + succ = pos_diff < 0.04 + + if succ: + self.step_id += 1 + return succ + + +class ClickStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=None, vector_direction=None, **kwargs): + super().__init__(active_obj_id, passive_obj_id, active_element, passive_element) + self.pre_distance = -0.03 + self.move_distance = 0.00 + self.generate_substage(target_pose, vector_direction) + + + def generate_substage(self, target_pose, vector_direction): + vector_direction = vector_direction / np.linalg.norm(vector_direction) + # moveTo pre-place position + pre_pose = target_pose.copy() + pre_pose[:3,3] += vector_direction * self.pre_distance + self.sub_stages.append([pre_pose, 'close', np.eye(4), 'AvoidObs']) + + # insert + move_pose = target_pose.copy() + move_pose[:3,3] += vector_direction * self.move_distance + self.sub_stages.append([move_pose, None, np.eye(4), 'Simple']) + diff --git a/data_gen_dependencies/action/slide.py b/data_gen_dependencies/action/slide.py new file mode 100644 index 0000000..96f4ea8 --- /dev/null +++ b/data_gen_dependencies/action/slide.py @@ -0,0 +1,47 @@ +import numpy as np + +from data_gen_dependencies.action.base import StageTemplate, pose_difference + + +class SlideStage(StageTemplate): + def __init__(self, active_obj_id, passive_obj_id, active_element=None, passive_element=None, target_pose=np.eye(4), **kwargs): + super().__init__(active_obj_id, passive_obj_id, 
active_element, passive_element) + + self.pre_distance = 0.0 + self.move_distance = 0.08 + vector_direction = passive_element['direction'] + self.generate_substage(target_pose, vector_direction) + + + + def generate_substage(self, target_pose, vector_direction): + vector_direction = vector_direction / np.linalg.norm(vector_direction) + # slide + move_pose = target_pose.copy() + move_pose[:3,3] += vector_direction * self.move_distance + self.sub_stages.append([move_pose, None, np.eye(4), 'Simple']) + + # open gripper + self.sub_stages.append([None, 'open', np.eye(4), None]) + + + + def check_completion(self, objects): + if self.__len__()==0: + return True + sub_stage = self.sub_stages[self.step_id] + move_transform, gripper_action = sub_stage[0], sub_stage[1] + if gripper_action=='open': + self.step_id += 1 + return True + last_pose = self.last_statement['objects'][self.passive_obj_id].obj_pose + target_obj_pose = last_pose @ move_transform + current_obj_pose = objects[self.passive_obj_id].obj_pose + pos_diff, _ = pose_difference(current_obj_pose, target_obj_pose) + succ = pos_diff < 0.04 + print(pos_diff, succ) + if succ: + self.step_id += 1 + return succ + + diff --git a/data_gen_dependencies/aimdk/protocol/CMakeLists.txt b/data_gen_dependencies/aimdk/protocol/CMakeLists.txt new file mode 100644 index 0000000..3e10083 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/CMakeLists.txt @@ -0,0 +1,8 @@ +cmake_minimum_required(VERSION 3.16) + +# ------------------------------------------------------------- +# Create the project + +aimrte_project(NAME aimrt_comm_protocol ROOT_NAMESPACE "aimdk;protocol") +aimrte_add_subdirectories(.) 
+aimrte_project_complete() diff --git a/data_gen_dependencies/aimdk/protocol/README.md b/data_gen_dependencies/aimdk/protocol/README.md new file mode 100644 index 0000000..a420677 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/README.md @@ -0,0 +1,25 @@ +# AimDK 接口协议 + +## 简介 + +本仓库定义了 AgiBot 产品的所有跨模块的接口协议,包含对外的部分与仅在内部流通的部分。 + +本协议的根命名空间是 **aimdk::protocol::** ,我们按不同模块进一步划分了子目录,并以这些模块的名,命名这些子目录及其下一级命名空间。 + + + +## 约定 + +协议编写规范见 [《研发规划/协议规范》](https://agirobot.feishu.cn/wiki/YBe5wYMtiicci2kBgBxc8KUHngb?fromScene=spaceOverview&theme=LIGHT&contentTheme=DARK) 。 + +我们需要避免冗余协议内容的出现,若多个模块出现相同的协议内容,若其语义拥有公共属性,应将其放到 common 子协议目录下。 + +我们将每个协议内容的定义,以注释的形式放在协议源文件中,但在每个子目录(包括本目录)设立 README.md,作为简单的检索之用, +让阅读者更方便找到所需的协议文件。 + + + +## 模块目录 + +- [公共部分](./common/README.md) +- [定位模块(SLAM)](./slam/README.md) diff --git a/data_gen_dependencies/aimdk/protocol/common/CMakeLists.txt b/data_gen_dependencies/aimdk/protocol/common/CMakeLists.txt new file mode 100644 index 0000000..392c6c3 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/CMakeLists.txt @@ -0,0 +1,5 @@ +# ------------------------------------------------------------- +# Protocol: aimdk::protocol::common +# ------------------------------------------------------------- + +aimrte_protocol() diff --git a/data_gen_dependencies/aimdk/protocol/common/README.md b/data_gen_dependencies/aimdk/protocol/common/README.md new file mode 100644 index 0000000..153f6d9 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/README.md @@ -0,0 +1,9 @@ +# AimDK common 协议 + +## 简介 + +本目录放置了所有模块公用、复用的协议内容的定义。 + +## 检索 + +- [所有消息、请求、响应都有的 header](./header.proto) diff --git a/data_gen_dependencies/aimdk/protocol/common/battery_state.proto b/data_gen_dependencies/aimdk/protocol/common/battery_state.proto new file mode 100644 index 0000000..445be5d --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/battery_state.proto @@ -0,0 +1,95 @@ +syntax = "proto3"; +package aimdk.protocol; + +import 
"aimdk/protocol/common/timestamp.proto"; + + +/** + *@brief 电池状态 + */ +enum PowerSupplyStatus{ + PowerSupplyStatus_UNDEFINED = 0; + PowerSupplyStatus_CHARGING = 1; + PowerSupplyStatus_DISCHARGING = 2; + PowerSupplyStatus_NOT_CHARGING = 3; + PowerSupplyStatus_FULL = 4; +} + +/** + *@brief 电池健康状态 + */ +enum PowerSupplyHealth{ + PowerSupplyHealth_UNDEFINED= 0; + PowerSupplyHealth_GOOD = 1; + PowerSupplyHealth_OVERHEAT = 2; + PowerSupplyHealth_DEAD = 3; + PowerSupplyHealth_OVERVOLTAGE = 4; + PowerSupplyHealth_UNSPEC_FAILURE = 5; + PowerSupplyHealth_COLD = 6; + PowerSupplyHealth_WATCHDOG_TIMER_EXPIRE = 7; + PowerSupplyHealth_SAFETY_TIMER_EXPIRE = 8; +} + + +/** + * @brief 电池技术 + */ +enum PowerSupplyTechnology{ + PowerSupplyTechnology_UNDEFINED = 0; + PowerSupplyTechnology_NIMH = 1; + PowerSupplyTechnology_LION = 2; + PowerSupplyTechnology_LIPO = 3; + PowerSupplyTechnology_LIFE = 4; + PowerSupplyTechnology_NICD = 5; + PowerSupplyTechnology_LIMN = 6; +} + +/** + * @brief 电池状态 对应ROS中的: sensor_msgs/BatteryState.msg + */ +message BatteryState{ + // 时间辍 + Timestamp timestamp = 1; + + // 电池电压 + float voltage = 2; + + // 电流 + float current = 3; + + // 电量 + float charge = 4; + + // 电池容量 + float capacity = 5; + + // 设计容量 + float design_capacity = 6; + + // 电池百分比 + float percentage = 7; + + // 电池状态 + PowerSupplyStatus power_supply_status = 8; + + // 电池健康状态 + PowerSupplyHealth power_supply_health = 9; + + // 电池技术 + PowerSupplyTechnology power_supply_technology = 10; + + // 电池单元电压 + repeated float cell_voltage = 11; + + // 电池位置 + string location = 12; + + // 电池序列号 + string serial_number = 13; + + // 电池功率 + float power = 14; + + // 电池温度 + float temperature = 15; +}; \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/box.proto b/data_gen_dependencies/aimdk/protocol/common/box.proto new file mode 100644 index 0000000..b270215 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/box.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package 
aimdk.protocol; + + +/** + * @brief A box in a 2D space. + */ +message Box { + float x1 = 1; + float y1 = 2; + float x2 = 3; + float y2 = 4; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/common.proto b/data_gen_dependencies/aimdk/protocol/common/common.proto new file mode 100644 index 0000000..d3c1f06 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/common.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/timestamp.proto"; +import public "aimdk/protocol/common/vec2.proto"; +import public "aimdk/protocol/common/vec3.proto"; +import public "aimdk/protocol/common/odometry.proto"; +import public "aimdk/protocol/common/quaternion.proto"; +import public "aimdk/protocol/common/se2_pose.proto"; +import public "aimdk/protocol/common/se3_pose.proto"; +import public "aimdk/protocol/common/se2_velocity.proto"; +import public "aimdk/protocol/common/se3_velocity.proto"; +import public "aimdk/protocol/common/se2_acceleration.proto"; +import public "aimdk/protocol/common/se3_acceleration.proto"; +import public "aimdk/protocol/common/se3_trajectory.proto"; +import public "aimdk/protocol/common/wrench.proto"; +import public "aimdk/protocol/common/twist.proto"; +import public "aimdk/protocol/common/pixel_pose.proto"; +import public "aimdk/protocol/common/image.proto"; +import public "aimdk/protocol/common/rpc.proto"; \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/header.proto b/data_gen_dependencies/aimdk/protocol/common/header.proto new file mode 100644 index 0000000..4fa8b78 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/header.proto @@ -0,0 +1,71 @@ +syntax = "proto3"; +package aimdk.protocol; + +import "aimdk/protocol/common/timestamp.proto"; + + +/** + * @brief 一般消息的 header + */ +message Header { + // sequence ID: consecutively increasing ID + uint32 seq = 1; + + // timestamp + Timestamp timestamp = 2; + + // Frame this data is associated 
with + string frame_id = 3; +} + +/** + * @brief RPC 请求体的 header + */ +message RequestHeader { + // timestamp + Timestamp timestamp = 1; +} + +/** + * @brief RPC 响应体的 header + */ +message ResponseHeader { + // 处理结果 + // - 为0: 成功 + // - 非0: 失败 + uint64 code = 1; + + // 处理结果描述 + string msg = 2; + + // 时间戳 + Timestamp timestamp = 3; +} + +/** + * @brief 可阻塞的 RPC 请求体的 header + */ +message BlockableRequestHeader { + // 时间戳 + Timestamp timestamp = 1; + + // 是否阻塞调用(默认 false) + bool blocked = 2; +} + +message BlockableResponseHeader { + // 处理结果 + // - 为0: 成功 + // - 非0: 失败 + uint64 code = 1; + + // 处理结果描述 + string msg = 2; + + // 时间戳 + Timestamp timestamp = 3; + + // 请求关联的 rpc id,若设置了非阻塞模式,该字段可用于调用者向执行者定期查询 rpc 执行情况, + // 该值需由执行者维护并设置,默认为零无效。 + uint64 id = 4; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/header_pb2.py b/data_gen_dependencies/aimdk/protocol/common/header_pb2.py new file mode 100644 index 0000000..ded4480 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/header_pb2.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/common/header.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import timestamp_pb2 as aimdk_dot_protocol_dot_common_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"aimdk/protocol/common/header.proto\x12\x0e\x61imdk.protocol\x1a%aimdk/protocol/common/timestamp.proto\"U\n\x06Header\x12\x0b\n\x03seq\x18\x01 \x01(\r\x12,\n\ttimestamp\x18\x02 \x01(\x0b\x32\x19.aimdk.protocol.Timestamp\x12\x10\n\x08\x66rame_id\x18\x03 \x01(\t\"=\n\rRequestHeader\x12,\n\ttimestamp\x18\x01 \x01(\x0b\x32\x19.aimdk.protocol.Timestamp\"Y\n\x0eResponseHeader\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x04\x12\x0b\n\x03msg\x18\x02 \x01(\t\x12,\n\ttimestamp\x18\x03 \x01(\x0b\x32\x19.aimdk.protocol.Timestamp\"W\n\x16\x42lockableRequestHeader\x12,\n\ttimestamp\x18\x01 \x01(\x0b\x32\x19.aimdk.protocol.Timestamp\x12\x0f\n\x07\x62locked\x18\x02 \x01(\x08\"n\n\x17\x42lockableResponseHeader\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x04\x12\x0b\n\x03msg\x18\x02 \x01(\t\x12,\n\ttimestamp\x18\x03 \x01(\x0b\x32\x19.aimdk.protocol.Timestamp\x12\n\n\x02id\x18\x04 \x01(\x04\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.header_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_HEADER']._serialized_start=93 + _globals['_HEADER']._serialized_end=178 + _globals['_REQUESTHEADER']._serialized_start=180 + _globals['_REQUESTHEADER']._serialized_end=241 + _globals['_RESPONSEHEADER']._serialized_start=243 + 
_globals['_RESPONSEHEADER']._serialized_end=332 + _globals['_BLOCKABLEREQUESTHEADER']._serialized_start=334 + _globals['_BLOCKABLEREQUESTHEADER']._serialized_end=421 + _globals['_BLOCKABLERESPONSEHEADER']._serialized_start=423 + _globals['_BLOCKABLERESPONSEHEADER']._serialized_end=533 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/common/image.proto b/data_gen_dependencies/aimdk/protocol/common/image.proto new file mode 100644 index 0000000..9508e1b --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/image.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package aimdk.protocol; + +import "aimdk/protocol/common/timestamp.proto"; + + +/** + * @brief 给客户端推送视频流的图像数据格式 + */ +message PubImageData { + // 单帧高 + int32 height = 1; + + // 单帧宽 + int32 width = 2; + + // 时间戳 + Timestamp timestamp = 3; + + // 图片数据 + bytes image_data = 4; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/joint.proto b/data_gen_dependencies/aimdk/protocol/common/joint.proto new file mode 100644 index 0000000..021ab47 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/joint.proto @@ -0,0 +1,100 @@ +syntax = "proto3"; +package aimdk.protocol; + + +// 关节信息 +message JointState { + /** + * 关节名称 + */ + string name = 1; + + /** + * 关节消息的序号 + */ + uint32 sequence = 2; + + /** + * 关节角度,单位:弧度 or m + */ + double position = 3; + + /** + * 关节角速度,单位:弧度/秒 or m/s + */ + double velocity = 4; + + /** + * 关节扭矩,单位:N or N*m + */ + double effort = 5; +} + +message JointCommand { + /** + * 关节名称 + */ + string name = 1; + + /** + * 关节消息的序号 + */ + uint32 sequence = 2; + + /** + * 关节角度,单位:弧度 or m + */ + double position = 3; + + /** + * 关节角速度,单位:弧度/秒 or m/s + */ + double velocity = 4; + + /** + * 关节扭矩,单位:N + */ + double effort = 5; + + /** + * 阻尼,单位: N·m/rad or N/m + */ + double stiffness = 6; + + /** + * 阻尼单位: N·s/m or N·m·s/rad + */ + double damping = 7; +} + +message JointParam { + /** + * 关节名称 + */ + string name = 1; + + /** + * 关节位置上限,单位:弧度或者米 + 
*/ + double position_upper_limit = 2; + + /** + * 关节位置下限,单位:弧度或者米 + */ + double position_lower_limit = 3; + + /** + * 关节速度限制,单位:弧度/秒或者米/秒 + */ + double velocity_limit = 4; + + /** + * 关节加速度限制,单位:弧度/秒^2或者米/秒^2 + */ + double acceleration_limit = 5; + + /** + * 关节扭矩限制,单位:N或者N·m + */ + double effort_limit = 6; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/joint_pb2.py b/data_gen_dependencies/aimdk/protocol/common/joint_pb2.py new file mode 100644 index 0000000..4f7183d --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/joint_pb2.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/common/joint.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!aimdk/protocol/common/joint.proto\x12\x0e\x61imdk.protocol\"`\n\nJointState\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08sequence\x18\x02 \x01(\r\x12\x10\n\x08position\x18\x03 \x01(\x01\x12\x10\n\x08velocity\x18\x04 \x01(\x01\x12\x0e\n\x06\x65\x66\x66ort\x18\x05 \x01(\x01\"\x86\x01\n\x0cJointCommand\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08sequence\x18\x02 \x01(\r\x12\x10\n\x08position\x18\x03 \x01(\x01\x12\x10\n\x08velocity\x18\x04 \x01(\x01\x12\x0e\n\x06\x65\x66\x66ort\x18\x05 \x01(\x01\x12\x11\n\tstiffness\x18\x06 \x01(\x01\x12\x0f\n\x07\x64\x61mping\x18\x07 \x01(\x01\"\xa0\x01\n\nJointParam\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1c\n\x14position_upper_limit\x18\x02 \x01(\x01\x12\x1c\n\x14position_lower_limit\x18\x03 \x01(\x01\x12\x16\n\x0evelocity_limit\x18\x04 
\x01(\x01\x12\x1a\n\x12\x61\x63\x63\x65leration_limit\x18\x05 \x01(\x01\x12\x14\n\x0c\x65\x66\x66ort_limit\x18\x06 \x01(\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.joint_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_JOINTSTATE']._serialized_start=53 + _globals['_JOINTSTATE']._serialized_end=149 + _globals['_JOINTCOMMAND']._serialized_start=152 + _globals['_JOINTCOMMAND']._serialized_end=286 + _globals['_JOINTPARAM']._serialized_start=289 + _globals['_JOINTPARAM']._serialized_end=449 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/common/odometry.proto b/data_gen_dependencies/aimdk/protocol/common/odometry.proto new file mode 100644 index 0000000..e904b34 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/odometry.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/timestamp.proto"; +import public "aimdk/protocol/common/se3_pose.proto"; +import public "aimdk/protocol/common/twist.proto"; + + +/** + * odometry + */ +message Odometry { + Timestamp timestamp = 1; + string child_frame_id = 2; + SE3Pose pose = 3; + Twist twist = 4; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/pixel_pose.proto b/data_gen_dependencies/aimdk/protocol/common/pixel_pose.proto new file mode 100644 index 0000000..9be1ca0 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/pixel_pose.proto @@ -0,0 +1,31 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * @brief 2d 像素坐标 + */ +message Pixel { + // 像素横坐标 + int32 u = 1; + + // 像素纵坐标 + int32 v = 2; +} + +/** + * @brief 2d 像素位姿 + * @note 坐标系示意 + * +--------> u + * | - (angle) + * | - + * | * + * v V + */ +message PixelPose { + // 2d 像素坐标 + Pixel position = 1; + + // 角度从u轴转向v轴的角度,单位:弧度 + double angle = 2; +} diff 
--git a/data_gen_dependencies/aimdk/protocol/common/qr_pose.proto b/data_gen_dependencies/aimdk/protocol/common/qr_pose.proto new file mode 100644 index 0000000..3f60d23 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/qr_pose.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/se3_pose.proto"; + + +/** + * QR位姿 + */ +message QRPose { + int32 qr_code = 1; // 二维码值 + SE3Pose pose = 2; // 6D位姿 +} diff --git a/data_gen_dependencies/aimdk/protocol/common/quaternion.proto b/data_gen_dependencies/aimdk/protocol/common/quaternion.proto new file mode 100644 index 0000000..f318d30 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/quaternion.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * 四元数 + */ +message Quaternion { + double x = 1; + double y = 2; + double z = 3; + double w = 4; +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/quaternion_pb2.py b/data_gen_dependencies/aimdk/protocol/common/quaternion_pb2.py new file mode 100644 index 0000000..697fa8e --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/quaternion_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/common/quaternion.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&aimdk/protocol/common/quaternion.proto\x12\x0e\x61imdk.protocol\"8\n\nQuaternion\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\x12\t\n\x01w\x18\x04 \x01(\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.quaternion_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_QUATERNION']._serialized_start=58 + _globals['_QUATERNION']._serialized_end=114 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/common/rpc.proto b/data_gen_dependencies/aimdk/protocol/common/rpc.proto new file mode 100644 index 0000000..0c966e4 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/rpc.proto @@ -0,0 +1,54 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/timestamp.proto"; +import public "aimdk/protocol/common/header.proto"; + +/** + * 状态定义 + */ +enum CommonState { + CommonState_UNKNOWN = 0; // 未知 + CommonState_SUCCESS = 1; // 成功 + CommonState_FAILURE = 2; // 失败 + CommonState_ABORTED = 3; // 中止 + CommonState_TIMEOUT = 4; // 超时 + CommonState_INVALID = 5; // 无效 + CommonState_IN_MANUAL = 6; // 手动 + CommonState_NOT_READY = 100; // 未就绪 + CommonState_PENDING = 200; // 等待中 + CommonState_CREATED = 300; // 已创建 + CommonState_RUNNING = 400; // 运行中 +} + +/** + * 通用请求类型 + */ 
+message CommonRequest { + RequestHeader header = 1; +} + +/** + * 通用返回类型 + */ +message CommonResponse { + ResponseHeader header = 1; + CommonState state = 2; +} + +/** + * 通用Task请求类型 + */ +message CommonTaskRequest { + RequestHeader header = 1; + uint64 task_id = 2; +} + +/** + * 通用Task返回类型 + */ +message CommonTaskResponse { + ResponseHeader header = 1; + uint64 task_id = 2; + CommonState state = 3; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/rpc_pb2.py b/data_gen_dependencies/aimdk/protocol/common/rpc_pb2.py new file mode 100644 index 0000000..49ccb92 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/rpc_pb2.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/common/rpc.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import timestamp_pb2 as aimdk_dot_protocol_dot_common_dot_timestamp__pb2 +from aimdk.protocol.common import header_pb2 as aimdk_dot_protocol_dot_common_dot_header__pb2 + +from aimdk.protocol.common.timestamp_pb2 import * +from aimdk.protocol.common.header_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x61imdk/protocol/common/rpc.proto\x12\x0e\x61imdk.protocol\x1a%aimdk/protocol/common/timestamp.proto\x1a\"aimdk/protocol/common/header.proto\">\n\rCommonRequest\x12-\n\x06header\x18\x01 \x01(\x0b\x32\x1d.aimdk.protocol.RequestHeader\"l\n\x0e\x43ommonResponse\x12.\n\x06header\x18\x01 \x01(\x0b\x32\x1e.aimdk.protocol.ResponseHeader\x12*\n\x05state\x18\x02 
\x01(\x0e\x32\x1b.aimdk.protocol.CommonState\"S\n\x11\x43ommonTaskRequest\x12-\n\x06header\x18\x01 \x01(\x0b\x32\x1d.aimdk.protocol.RequestHeader\x12\x0f\n\x07task_id\x18\x02 \x01(\x04\"\x81\x01\n\x12\x43ommonTaskResponse\x12.\n\x06header\x18\x01 \x01(\x0b\x32\x1e.aimdk.protocol.ResponseHeader\x12\x0f\n\x07task_id\x18\x02 \x01(\x04\x12*\n\x05state\x18\x03 \x01(\x0e\x32\x1b.aimdk.protocol.CommonState*\xa7\x02\n\x0b\x43ommonState\x12\x17\n\x13\x43ommonState_UNKNOWN\x10\x00\x12\x17\n\x13\x43ommonState_SUCCESS\x10\x01\x12\x17\n\x13\x43ommonState_FAILURE\x10\x02\x12\x17\n\x13\x43ommonState_ABORTED\x10\x03\x12\x17\n\x13\x43ommonState_TIMEOUT\x10\x04\x12\x17\n\x13\x43ommonState_INVALID\x10\x05\x12\x19\n\x15\x43ommonState_IN_MANUAL\x10\x06\x12\x19\n\x15\x43ommonState_NOT_READY\x10\x64\x12\x18\n\x13\x43ommonState_PENDING\x10\xc8\x01\x12\x18\n\x13\x43ommonState_CREATED\x10\xac\x02\x12\x18\n\x13\x43ommonState_RUNNING\x10\x90\x03P\x00P\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.rpc_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_COMMONSTATE']._serialized_start=518 + _globals['_COMMONSTATE']._serialized_end=813 + _globals['_COMMONREQUEST']._serialized_start=126 + _globals['_COMMONREQUEST']._serialized_end=188 + _globals['_COMMONRESPONSE']._serialized_start=190 + _globals['_COMMONRESPONSE']._serialized_end=298 + _globals['_COMMONTASKREQUEST']._serialized_start=300 + _globals['_COMMONTASKREQUEST']._serialized_end=383 + _globals['_COMMONTASKRESPONSE']._serialized_start=386 + _globals['_COMMONTASKRESPONSE']._serialized_end=515 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/common/rpy.proto b/data_gen_dependencies/aimdk/protocol/common/rpy.proto new file mode 100644 index 0000000..19fc50a --- /dev/null +++ 
b/data_gen_dependencies/aimdk/protocol/common/rpy.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * @brief RPY角度姿态表示 + */ +message Rpy { + double rw = 1; + double rx = 2; // 绕固定轴X旋转角度,单位:rad + double ry = 3; // 绕固定轴Y旋转角度,单位:rad + double rz = 4; // 绕固定轴Z旋转角度,单位:rad +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/rpy_pb2.py b/data_gen_dependencies/aimdk/protocol/common/rpy_pb2.py new file mode 100644 index 0000000..df876df --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/rpy_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/common/rpy.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x61imdk/protocol/common/rpy.proto\x12\x0e\x61imdk.protocol\"5\n\x03Rpy\x12\n\n\x02rw\x18\x01 \x01(\x01\x12\n\n\x02rx\x18\x02 \x01(\x01\x12\n\n\x02ry\x18\x03 \x01(\x01\x12\n\n\x02rz\x18\x04 \x01(\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.rpy_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_RPY']._serialized_start=51 + _globals['_RPY']._serialized_end=104 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/common/rpy_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/common/rpy_pb2_grpc.py new file mode 100644 index 0000000..2daafff --- /dev/null +++ 
b/data_gen_dependencies/aimdk/protocol/common/rpy_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/data_gen_dependencies/aimdk/protocol/common/se2_acceleration.proto b/data_gen_dependencies/aimdk/protocol/common/se2_acceleration.proto new file mode 100644 index 0000000..e576c85 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se2_acceleration.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec2.proto"; + + +/** + * SE2 加速度 + */ +message SE2Acceleration { + Vec2 linear = 1; // 线加速度,单位:米/秒^2 + Vec2 angular = 2; // 角加速度,单位:弧度/秒^2 +} diff --git a/data_gen_dependencies/aimdk/protocol/common/se2_pose.proto b/data_gen_dependencies/aimdk/protocol/common/se2_pose.proto new file mode 100644 index 0000000..61c1032 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se2_pose.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec2.proto"; + + +/** + * SE2位姿 + */ +message SE2Pose { + Vec2 position = 1; // 2d 位姿,单位:米 + double angle = 2; // 角度,单位:弧度 +} diff --git a/data_gen_dependencies/aimdk/protocol/common/se2_velocity.proto b/data_gen_dependencies/aimdk/protocol/common/se2_velocity.proto new file mode 100644 index 0000000..ba7802d --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se2_velocity.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec2.proto"; + + +/** + * SE2 速度 + */ +message SE2Velocity { + Vec2 linear = 1; // 线速度,单位:米/秒 + Vec2 angular = 2; // 角速度,单位:弧度/秒 +} diff --git a/data_gen_dependencies/aimdk/protocol/common/se3_acceleration.proto b/data_gen_dependencies/aimdk/protocol/common/se3_acceleration.proto new file mode 100644 index 0000000..ee9641d --- /dev/null +++ 
b/data_gen_dependencies/aimdk/protocol/common/se3_acceleration.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec3.proto"; +import public "aimdk/protocol/common/quaternion.proto"; + + +/** + * SE3 加速度 + */ +message SE3Acceleration { + Vec3 linear = 1; // 线加速度,单位:米/秒^2 + Vec3 angular = 2; // 角加速度,单位:弧度/秒^2 +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/se3_pose.proto b/data_gen_dependencies/aimdk/protocol/common/se3_pose.proto new file mode 100644 index 0000000..d4b3be7 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se3_pose.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec3.proto"; +import public "aimdk/protocol/common/quaternion.proto"; +import public "aimdk/protocol/common/rpy.proto"; + + +/** + * SE3位姿(四元数姿态表示) + */ +message SE3Pose { + Vec3 position = 1; // 3d 位姿,单位:米 + Quaternion orientation = 2; // 四元数 +} + +/** + * SE3位姿(RPY角度表示) + */ +message SE3RpyPose { + Vec3 position = 1; // 3d 位姿,单位:米 + Rpy rpy = 2; // RPY角度 +} diff --git a/data_gen_dependencies/aimdk/protocol/common/se3_pose_pb2.py b/data_gen_dependencies/aimdk/protocol/common/se3_pose_pb2.py new file mode 100644 index 0000000..2c4a938 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se3_pose_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/common/se3_pose.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import vec3_pb2 as aimdk_dot_protocol_dot_common_dot_vec3__pb2 +from aimdk.protocol.common import quaternion_pb2 as aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +from aimdk.protocol.common import rpy_pb2 as aimdk_dot_protocol_dot_common_dot_rpy__pb2 + +from aimdk.protocol.common.vec3_pb2 import * +from aimdk.protocol.common.quaternion_pb2 import * +from aimdk.protocol.common.rpy_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$aimdk/protocol/common/se3_pose.proto\x12\x0e\x61imdk.protocol\x1a aimdk/protocol/common/vec3.proto\x1a&aimdk/protocol/common/quaternion.proto\x1a\x1f\x61imdk/protocol/common/rpy.proto\"b\n\x07SE3Pose\x12&\n\x08position\x18\x01 \x01(\x0b\x32\x14.aimdk.protocol.Vec3\x12/\n\x0borientation\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.Quaternion\"V\n\nSE3RpyPose\x12&\n\x08position\x18\x01 \x01(\x0b\x32\x14.aimdk.protocol.Vec3\x12 \n\x03rpy\x18\x02 \x01(\x0b\x32\x13.aimdk.protocol.RpyP\x00P\x01P\x02\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.se3_pose_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_SE3POSE']._serialized_start=163 + _globals['_SE3POSE']._serialized_end=261 + _globals['_SE3RPYPOSE']._serialized_start=263 + _globals['_SE3RPYPOSE']._serialized_end=349 +# @@protoc_insertion_point(module_scope) diff --git 
a/data_gen_dependencies/aimdk/protocol/common/se3_trajectory.proto b/data_gen_dependencies/aimdk/protocol/common/se3_trajectory.proto new file mode 100644 index 0000000..f716025 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se3_trajectory.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/se3_acceleration.proto"; +import public "aimdk/protocol/common/se3_velocity.proto"; +import public "aimdk/protocol/common/se3_pose.proto"; +import public "aimdk/protocol/common/timestamp.proto"; + + +/** + * SE3轨迹点 + */ +message SE3TrajectoryPoint { + Timestamp timestamp = 1; + SE3Pose pose = 2; + SE3Velocity velocity = 3; + SE3Acceleration acceleration = 4; +} + +/** + * SE3轨迹 + */ +message SE3Trajectory { + repeated SE3TrajectoryPoint points = 1; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/se3_velocity.proto b/data_gen_dependencies/aimdk/protocol/common/se3_velocity.proto new file mode 100644 index 0000000..c1fd93e --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/se3_velocity.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec3.proto"; + + +/** + * SE3 速度 + */ +message SE3Velocity { + Vec3 linear = 1; // 线速度,单位:米/秒 + Vec3 angular = 2; // 角速度,单位:弧度/秒 +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/timestamp.proto b/data_gen_dependencies/aimdk/protocol/common/timestamp.proto new file mode 100644 index 0000000..a8dedcc --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/timestamp.proto @@ -0,0 +1,48 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * 时间戳 + */ +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. 
Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + int32 nanos = 2; + + // miliseconds since epoch for counts using by frontend. + int64 ms_since_epoch = 3; +} + +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from C++ `std::chrono::system_clock::now()`. +// +// std::chrono::system_clock::time_point tp = +// std::chrono::system_clock::now(); +// std::chrono::duration diff = tp.time_since_epoch(); +// Timestamp timestamp; +// timestamp.set_seconds(diff.count() / 1e9); +// timestamp.set_nanos(diff.count() % 1e9); \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/timestamp_pb2.py b/data_gen_dependencies/aimdk/protocol/common/timestamp_pb2.py new file mode 100644 index 0000000..524e837 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/timestamp_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/common/timestamp.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%aimdk/protocol/common/timestamp.proto\x12\x0e\x61imdk.protocol\"C\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x12\x16\n\x0ems_since_epoch\x18\x03 \x01(\x03\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.timestamp_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_TIMESTAMP']._serialized_start=57 + _globals['_TIMESTAMP']._serialized_end=124 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/common/twist.proto b/data_gen_dependencies/aimdk/protocol/common/twist.proto new file mode 100644 index 0000000..0d35d10 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/twist.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec3.proto"; +import public "aimdk/protocol/common/timestamp.proto"; + + +/** + * 速度 + */ +message Twist { + Vec3 linear = 1; + Vec3 angular = 2; +} + +/** + * 速度 带时间戳 + */ +message TwistStamped { + Timestamp timestamp = 1; + Twist twist = 2; +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/common/vec2.proto b/data_gen_dependencies/aimdk/protocol/common/vec2.proto new file mode 100644 index 0000000..4c6c4f1 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/vec2.proto @@ 
-0,0 +1,11 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * @brief 二维向量 + */ +message Vec2 { + double x = 1; + double y = 2; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/vec3.proto b/data_gen_dependencies/aimdk/protocol/common/vec3.proto new file mode 100644 index 0000000..ea6bc0b --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/vec3.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * @brief 三维向量 + */ +message Vec3 { + double x = 1; + double y = 2; + double z = 3; +} diff --git a/data_gen_dependencies/aimdk/protocol/common/vec3_pb2.py b/data_gen_dependencies/aimdk/protocol/common/vec3_pb2.py new file mode 100644 index 0000000..febff19 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/vec3_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/common/vec3.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n aimdk/protocol/common/vec3.proto\x12\x0e\x61imdk.protocol\"\'\n\x04Vec3\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.common.vec3_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_VEC3']._serialized_start=52 + _globals['_VEC3']._serialized_end=91 +# @@protoc_insertion_point(module_scope) diff --git 
a/data_gen_dependencies/aimdk/protocol/common/wrench.proto b/data_gen_dependencies/aimdk/protocol/common/wrench.proto new file mode 100644 index 0000000..6c726f4 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/common/wrench.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec3.proto"; +import public "aimdk/protocol/common/timestamp.proto"; + + +/** + * @brief 力矩 + */ +message Wrench { + Vec3 force = 1; // force 单位:N + Vec3 torque = 2; // torque 单位:N*m +} + +/** + * @brief 力矩,带时间戳 + */ +message WrenchStamped { + Timestamp timestamp = 1; + Wrench wrench = 2; +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/CMakeLists.txt b/data_gen_dependencies/aimdk/protocol/hal/CMakeLists.txt new file mode 100644 index 0000000..06291cd --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/CMakeLists.txt @@ -0,0 +1,5 @@ +# ------------------------------------------------------------- +# Protocol: aimdk::protocol::hand +# ------------------------------------------------------------- + +aimrte_protocol(DEPEND aimdk::protocol::common ROS2_DEPEND std_msgs sensor_msgs) diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw.proto b/data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw.proto new file mode 100644 index 0000000..b946b8e --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; +package aimdk.protocol; + + +// 夹爪手部命令 +message AgiClawHandCommand { + uint32 cmd = 1; + uint32 pos = 2; + uint32 force = 3; + uint32 clamp_method = 4; + uint32 finger_pos = 5; +} + +// 夹爪手部状态 +message AgiClawHandState { + uint32 state = 1; + uint32 pos = 2; + uint32 temperature = 3; + uint32 finger_pos = 4; +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw_pb2.py new file mode 100644 index 0000000..ca1f19f 
--- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/agi_claw_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/hal/hand/agi_claw.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&aimdk/protocol/hal/hand/agi_claw.proto\x12\x0e\x61imdk.protocol\"g\n\x12\x41giClawHandCommand\x12\x0b\n\x03\x63md\x18\x01 \x01(\r\x12\x0b\n\x03pos\x18\x02 \x01(\r\x12\r\n\x05\x66orce\x18\x03 \x01(\r\x12\x14\n\x0c\x63lamp_method\x18\x04 \x01(\r\x12\x12\n\nfinger_pos\x18\x05 \x01(\r\"W\n\x10\x41giClawHandState\x12\r\n\x05state\x18\x01 \x01(\r\x12\x0b\n\x03pos\x18\x02 \x01(\r\x12\x13\n\x0btemperature\x18\x03 \x01(\r\x12\x12\n\nfinger_pos\x18\x04 \x01(\rb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.hand.agi_claw_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_AGICLAWHANDCOMMAND']._serialized_start=58 + _globals['_AGICLAWHANDCOMMAND']._serialized_end=161 + _globals['_AGICLAWHANDSTATE']._serialized_start=163 + _globals['_AGICLAWHANDSTATE']._serialized_end=250 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service.proto b/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service.proto new file mode 100644 index 0000000..8b37b83 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service.proto @@ -0,0 +1,25 @@ +syntax 
= "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/rpc.proto"; +import public "aimdk/protocol/common/header.proto"; +import public "aimdk/protocol/hal/hand/hand.proto"; + + +// 获取机器人手部状态响应 +message HandStateResponse { + ResponseHeader header = 1; // 响应头 + HandState data = 2; // 手部状态 +} + +// 设置机器人手部控制请求 +message HandCommandRequest { + RequestHeader header = 1; // 请求头 + HandCommand data = 2; // 手部控制 +} + +// HandService +service HalHandService { + rpc GetHandState(CommonRequest) returns (HandStateResponse); + rpc SetHandCommand(HandCommandRequest) returns (CommonResponse); +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2.py new file mode 100644 index 0000000..e3fd240 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/hal/hand/hal_hand_service.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import rpc_pb2 as aimdk_dot_protocol_dot_common_dot_rpc__pb2 +try: + aimdk_dot_protocol_dot_common_dot_timestamp__pb2 = aimdk_dot_protocol_dot_common_dot_rpc__pb2.aimdk_dot_protocol_dot_common_dot_timestamp__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_timestamp__pb2 = aimdk_dot_protocol_dot_common_dot_rpc__pb2.aimdk.protocol.common.timestamp_pb2 +try: + aimdk_dot_protocol_dot_common_dot_header__pb2 = aimdk_dot_protocol_dot_common_dot_rpc__pb2.aimdk_dot_protocol_dot_common_dot_header__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_header__pb2 = aimdk_dot_protocol_dot_common_dot_rpc__pb2.aimdk.protocol.common.header_pb2 +from aimdk.protocol.common import header_pb2 as aimdk_dot_protocol_dot_common_dot_header__pb2 +from aimdk.protocol.hal.hand import hand_pb2 as aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2 +try: + aimdk_dot_protocol_dot_common_dot_header__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk_dot_protocol_dot_common_dot_header__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_header__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk.protocol.common.header_pb2 +try: + aimdk_dot_protocol_dot_hal_dot_hand_dot_inspire__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk_dot_protocol_dot_hal_dot_hand_dot_inspire__pb2 +except AttributeError: + aimdk_dot_protocol_dot_hal_dot_hand_dot_inspire__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk.protocol.hal.hand.inspire_pb2 +try: + 
aimdk_dot_protocol_dot_hal_dot_hand_dot_jodell__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk_dot_protocol_dot_hal_dot_hand_dot_jodell__pb2 +except AttributeError: + aimdk_dot_protocol_dot_hal_dot_hand_dot_jodell__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk.protocol.hal.hand.jodell_pb2 +try: + aimdk_dot_protocol_dot_hal_dot_hand_dot_agi__claw__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk_dot_protocol_dot_hal_dot_hand_dot_agi__claw__pb2 +except AttributeError: + aimdk_dot_protocol_dot_hal_dot_hand_dot_agi__claw__pb2 = aimdk_dot_protocol_dot_hal_dot_hand_dot_hand__pb2.aimdk.protocol.hal.hand.agi_claw_pb2 + +from aimdk.protocol.common.rpc_pb2 import * +from aimdk.protocol.common.header_pb2 import * +from aimdk.protocol.hal.hand.hand_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.aimdk/protocol/hal/hand/hal_hand_service.proto\x12\x0e\x61imdk.protocol\x1a\x1f\x61imdk/protocol/common/rpc.proto\x1a\"aimdk/protocol/common/header.proto\x1a\"aimdk/protocol/hal/hand/hand.proto\"l\n\x11HandStateResponse\x12.\n\x06header\x18\x01 \x01(\x0b\x32\x1e.aimdk.protocol.ResponseHeader\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.aimdk.protocol.HandState\"n\n\x12HandCommandRequest\x12-\n\x06header\x18\x01 \x01(\x0b\x32\x1d.aimdk.protocol.RequestHeader\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.aimdk.protocol.HandCommand2\xb8\x01\n\x0eHalHandService\x12P\n\x0cGetHandState\x12\x1d.aimdk.protocol.CommonRequest\x1a!.aimdk.protocol.HandStateResponse\x12T\n\x0eSetHandCommand\x12\".aimdk.protocol.HandCommandRequest\x1a\x1e.aimdk.protocol.CommonResponseP\x00P\x01P\x02\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.hand.hal_hand_service_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_HANDSTATERESPONSE']._serialized_start=171 + 
_globals['_HANDSTATERESPONSE']._serialized_end=279 + _globals['_HANDCOMMANDREQUEST']._serialized_start=281 + _globals['_HANDCOMMANDREQUEST']._serialized_end=391 + _globals['_HALHANDSERVICE']._serialized_start=394 + _globals['_HALHANDSERVICE']._serialized_end=578 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2_grpc.py new file mode 100644 index 0000000..340d202 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/hal_hand_service_pb2_grpc.py @@ -0,0 +1,103 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from aimdk.protocol.common import rpc_pb2 as aimdk_dot_protocol_dot_common_dot_rpc__pb2 +from aimdk.protocol.hal.hand import hal_hand_service_pb2 as aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2 + + +class HalHandServiceStub(object): + """HandService + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetHandState = channel.unary_unary( + '/aimdk.protocol.HalHandService/GetHandState', + request_serializer=aimdk_dot_protocol_dot_common_dot_rpc__pb2.CommonRequest.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2.HandStateResponse.FromString, + ) + self.SetHandCommand = channel.unary_unary( + '/aimdk.protocol.HalHandService/SetHandCommand', + request_serializer=aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2.HandCommandRequest.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_common_dot_rpc__pb2.CommonResponse.FromString, + ) + + +class HalHandServiceServicer(object): + """HandService + """ + + def GetHandState(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetHandCommand(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_HalHandServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetHandState': grpc.unary_unary_rpc_method_handler( + servicer.GetHandState, + request_deserializer=aimdk_dot_protocol_dot_common_dot_rpc__pb2.CommonRequest.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2.HandStateResponse.SerializeToString, + ), + 'SetHandCommand': grpc.unary_unary_rpc_method_handler( + servicer.SetHandCommand, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2.HandCommandRequest.FromString, + response_serializer=aimdk_dot_protocol_dot_common_dot_rpc__pb2.CommonResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 
'aimdk.protocol.HalHandService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class HalHandService(object): + """HandService + """ + + @staticmethod + def GetHandState(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.HalHandService/GetHandState', + aimdk_dot_protocol_dot_common_dot_rpc__pb2.CommonRequest.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2.HandStateResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetHandCommand(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.HalHandService/SetHandCommand', + aimdk_dot_protocol_dot_hal_dot_hand_dot_hal__hand__service__pb2.HandCommandRequest.SerializeToString, + aimdk_dot_protocol_dot_common_dot_rpc__pb2.CommonResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/hand.proto b/data_gen_dependencies/aimdk/protocol/hal/hand/hand.proto new file mode 100644 index 0000000..68077f6 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/hand.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/header.proto"; +import public "aimdk/protocol/hal/hand/inspire.proto"; +import public "aimdk/protocol/hal/hand/jodell.proto"; +import public "aimdk/protocol/hal/hand/agi_claw.proto"; + + +message SingleHandState { + 
oneof hand { + InspireHand inspire = 1; // 因时手 + ClawHand claw = 2; // jodell夹爪手 + AgiClawHandState agi_claw_state = 3; // agi自研夹爪状态 + } +} + +message SingleHandCommand { + oneof hand { + InspireHand inspire = 1; // 因时手 + ClawHand claw = 2; // jodell夹爪手 + AgiClawHandCommand agi_claw_cmd = 3; // agi自研夹爪命令 + } +} + +message HandState { + SingleHandState left = 2; // 左手状态 + SingleHandState right = 3; // 右手状态 +} + +message HandCommand { + SingleHandCommand left = 2; // 左手指令 + SingleHandCommand right = 3; // 右手指令 +} + diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/hand_channel.proto b/data_gen_dependencies/aimdk/protocol/hal/hand/hand_channel.proto new file mode 100644 index 0000000..92a5e4a --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/hand_channel.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/header.proto"; +import public "aimdk/protocol/hal/hand/hand.proto"; + + +// 机器人手部状态 +message HandStateChannel { + Header header = 1; // 消息头 + HandState data = 2; // 手部状态 +} + +// 机器人手部指令 +message HandCommandChannel { + Header header = 1; // 消息头 + HandCommand data = 2; // 手部指令 +} diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/hand_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/hand/hand_pb2.py new file mode 100644 index 0000000..6061ed1 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/hand_pb2.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/hal/hand/hand.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import header_pb2 as aimdk_dot_protocol_dot_common_dot_header__pb2 +from aimdk.protocol.hal.hand import inspire_pb2 as aimdk_dot_protocol_dot_hal_dot_hand_dot_inspire__pb2 +from aimdk.protocol.hal.hand import jodell_pb2 as aimdk_dot_protocol_dot_hal_dot_hand_dot_jodell__pb2 +from aimdk.protocol.hal.hand import agi_claw_pb2 as aimdk_dot_protocol_dot_hal_dot_hand_dot_agi__claw__pb2 + +from aimdk.protocol.common.header_pb2 import * +from aimdk.protocol.hal.hand.inspire_pb2 import * +from aimdk.protocol.hal.hand.jodell_pb2 import * +from aimdk.protocol.hal.hand.agi_claw_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"aimdk/protocol/hal/hand/hand.proto\x12\x0e\x61imdk.protocol\x1a\"aimdk/protocol/common/header.proto\x1a%aimdk/protocol/hal/hand/inspire.proto\x1a$aimdk/protocol/hal/hand/jodell.proto\x1a&aimdk/protocol/hal/hand/agi_claw.proto\"\xaf\x01\n\x0fSingleHandState\x12.\n\x07inspire\x18\x01 \x01(\x0b\x32\x1b.aimdk.protocol.InspireHandH\x00\x12(\n\x04\x63law\x18\x02 \x01(\x0b\x32\x18.aimdk.protocol.ClawHandH\x00\x12:\n\x0e\x61gi_claw_state\x18\x03 \x01(\x0b\x32 .aimdk.protocol.AgiClawHandStateH\x00\x42\x06\n\x04hand\"\xb1\x01\n\x11SingleHandCommand\x12.\n\x07inspire\x18\x01 \x01(\x0b\x32\x1b.aimdk.protocol.InspireHandH\x00\x12(\n\x04\x63law\x18\x02 \x01(\x0b\x32\x18.aimdk.protocol.ClawHandH\x00\x12:\n\x0c\x61gi_claw_cmd\x18\x03 \x01(\x0b\x32\".aimdk.protocol.AgiClawHandCommandH\x00\x42\x06\n\x04hand\"j\n\tHandState\x12-\n\x04left\x18\x02 
\x01(\x0b\x32\x1f.aimdk.protocol.SingleHandState\x12.\n\x05right\x18\x03 \x01(\x0b\x32\x1f.aimdk.protocol.SingleHandState\"p\n\x0bHandCommand\x12/\n\x04left\x18\x02 \x01(\x0b\x32!.aimdk.protocol.SingleHandCommand\x12\x30\n\x05right\x18\x03 \x01(\x0b\x32!.aimdk.protocol.SingleHandCommandP\x00P\x01P\x02P\x03\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.hand.hand_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_SINGLEHANDSTATE']._serialized_start=208 + _globals['_SINGLEHANDSTATE']._serialized_end=383 + _globals['_SINGLEHANDCOMMAND']._serialized_start=386 + _globals['_SINGLEHANDCOMMAND']._serialized_end=563 + _globals['_HANDSTATE']._serialized_start=565 + _globals['_HANDSTATE']._serialized_end=671 + _globals['_HANDCOMMAND']._serialized_start=673 + _globals['_HANDCOMMAND']._serialized_end=785 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/inspire.proto b/data_gen_dependencies/aimdk/protocol/hal/hand/inspire.proto new file mode 100644 index 0000000..29f1d61 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/inspire.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package aimdk.protocol; + + +// 手指 +message Finger { + int32 thumb_pos_0 = 1; // 拇指第一关节 + int32 thumb_pos_1 = 2; // 拇指第二关节 + int32 index_pos = 3; // 食指 + int32 middle_pos = 4; // 中指 + int32 ring_pos = 5; // 无名指 + int32 pinky_pos = 6; // 小指 +} + +// 手腕 +message Wrist { + int32 x = 1; // x轴 + int32 y = 2; // y轴 +} + +// Inspire手部 +message InspireHand { + Finger finger = 1; // 手指 + Wrist wrist = 2; // 手腕 +} diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/inspire_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/hand/inspire_pb2.py new file mode 100644 index 0000000..965b765 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/inspire_pb2.py @@ 
-0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/hal/hand/inspire.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%aimdk/protocol/hal/hand/inspire.proto\x12\x0e\x61imdk.protocol\"~\n\x06\x46inger\x12\x13\n\x0bthumb_pos_0\x18\x01 \x01(\x05\x12\x13\n\x0bthumb_pos_1\x18\x02 \x01(\x05\x12\x11\n\tindex_pos\x18\x03 \x01(\x05\x12\x12\n\nmiddle_pos\x18\x04 \x01(\x05\x12\x10\n\x08ring_pos\x18\x05 \x01(\x05\x12\x11\n\tpinky_pos\x18\x06 \x01(\x05\"\x1d\n\x05Wrist\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05\"[\n\x0bInspireHand\x12&\n\x06\x66inger\x18\x01 \x01(\x0b\x32\x16.aimdk.protocol.Finger\x12$\n\x05wrist\x18\x02 \x01(\x0b\x32\x15.aimdk.protocol.Wristb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.hand.inspire_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_FINGER']._serialized_start=57 + _globals['_FINGER']._serialized_end=183 + _globals['_WRIST']._serialized_start=185 + _globals['_WRIST']._serialized_end=214 + _globals['_INSPIREHAND']._serialized_start=216 + _globals['_INSPIREHAND']._serialized_end=307 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/jodell.proto b/data_gen_dependencies/aimdk/protocol/hal/hand/jodell.proto new file mode 100644 index 0000000..4552fe0 --- /dev/null +++ 
b/data_gen_dependencies/aimdk/protocol/hal/hand/jodell.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; +package aimdk.protocol; + + +// 夹爪手部 +message ClawHand { + uint32 position = 1; + uint32 velocity = 2; + uint32 force = 3; +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/hand/jodell_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/hand/jodell_pb2.py new file mode 100644 index 0000000..a592ff9 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/hand/jodell_pb2.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/hal/hand/jodell.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$aimdk/protocol/hal/hand/jodell.proto\x12\x0e\x61imdk.protocol\"=\n\x08\x43lawHand\x12\x10\n\x08position\x18\x01 \x01(\r\x12\x10\n\x08velocity\x18\x02 \x01(\r\x12\r\n\x05\x66orce\x18\x03 \x01(\rb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.hand.jodell_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_CLAWHAND']._serialized_start=56 + _globals['_CLAWHAND']._serialized_end=117 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka.proto b/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka.proto new file mode 100644 index 0000000..3622d29 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka.proto @@ -0,0 +1,733 @@ 
+syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/vec3.proto"; +import public "aimdk/protocol/common/quaternion.proto"; +import public "aimdk/protocol/common/rpy.proto"; +import public "aimdk/protocol/common/se3_pose.proto"; + + +/** + * @brief 关节位置,单位rad + */ +message JointPos { // Jaka: JointValue + repeated double jPos = 1; +} + +/** + * @brief 负载 + */ +message PayLoad { + double mass = 1; // 负载质量,单位:kg + Vec3 centroid = 2; // 负载质心, 单位:mm +} + +/** + * @brief 运动模式 + */ +enum MoveMode { + MoveMode_ABS = 0; // 绝对运动 + MoveMode_INCR = 1; // 相对运动 + MoveMode_CONTINUE = 2; // 连续运动 +} + +/** + * @brief 机器人状态 + */ +message RobotState { + int32 estoped = 1; // 是否急停 + int32 poweredOn = 2; // 是否打开电源 + int32 servoEnabled = 3; // 是否使能 +} + +/** + * @brief 机械臂单关节瞬时信息 + */ +message JointMonitorData { + double instCurrent = 1; // 瞬时电流 + double instVoltage = 2; // 瞬时电压 + double instTemperature = 3; // 瞬时温度 + double instVel = 4; // 瞬时速度 控制器1.7.0.20及以上 + double instTorq = 5; // 瞬时力矩 +} + +/** + * @brief 机械臂关节信息 + */ +message RobotMonitorData { + double scbMajorVersion = 1; // scb主版本号 + double scbMinorVersion = 2; // scb小版本号 + double cabTemperature = 3; // 控制器温度 + double robotAveragePower = 4; // 机器人平均电压 + double robotAverageCurrent = 5; // 机器人平均电流 + repeated JointMonitorData joint_monitor_data = 6; // 机器人7个关节的监测数据 +} + +/** + * @brief 力传感器监控信息 + */ +message TorqSernsorMonitorData { + string ip = 1; // 力矩传感器ip地址 + int32 port = 2; // 力矩传感器端口号 + PayLoad payLoad = 3; // 工具负载 + int32 status = 4; // 力矩传感器状态 + int32 errcode = 5; // 力矩传感器异常错误码 + repeated double actTorque = 6; // 力矩传感器实际接触力值(勾选初始化时)或原始读数值(勾选不初始化时) + repeated double torque = 7; // 力矩传感器原始读数值 + repeated double realTorque = 8; // 力矩传感器实际接触力值(不随初始化选项变化) +} + +/** + * @brief 机械臂详细状态信息 + */ +message RobotStatus { + int32 errcode = 1; + int32 inpos = 2; + int32 powered_on = 3; + int32 enabled = 4; + double rapidrate = 5; + int32 protective_stop = 6; + int32 emergency_stop = 7; + repeated double 
cartesiantran_pos = 8; + repeated double joint_pos = 9; + uint32 on_soft_limit = 10; + uint32 cur_user_id = 11; + int32 drag_status = 12; + RobotMonitorData robot_monitor_data = 13; + TorqSernsorMonitorData torq_sensor_monitor_data = 14; + int32 is_socket_connect = 15; +} + +/** + * @brief 可选参数 + */ +message OptionalCond { + int32 executingLineId = 1; +} + +/** + * @brief 错误状态 + */ +message ErrorCode { + int32 code = 1; + string message = 2; +} + +/** + * @brief rpc GetJointPosition() 请求信息 + */ +message GetJointPositionReq { + string robot_name = 1; +} + +/** + * @brief rpc GetJointPosition() 响应信息 + */ +message GetJointPositionRsp { + JointPos pos = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief JointMove扩展参数 + */ +message JointMoveExt { + double acc = 1; + double tol = 2; + OptionalCond option_cond = 3; +} + +/** + * @brief rpc JointMove() 请求信息 + */ +message JointMoveReq { + string robot_name = 1; + JointPos pos = 2; + MoveMode mode = 3; + bool is_block = 4; + double distance_frame = 5; + JointMoveExt ext = 6; + bool ee_interpolation = 7; +} + +/** + * @brief rpc JointMove() 响应信息 + */ +message JointMoveRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief LinearMove扩展参数 + */ +message LinearMoveExt { + double acc = 1; + double tol = 2; + OptionalCond option_cond = 3; +} + +/** + * @brief rpc LinearMove() 请求信息 + */ +message LinearMoveReq { + string robot_name = 1; + SE3RpyPose pose = 2; + MoveMode mode = 3; + bool is_block = 4; + double distance_frame = 5; + LinearMoveExt ext = 6; + bool ee_interpolation = 7; +} + +/** + * @brief rpc LinearMove() 响应信息 + */ +message LinearMoveRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc GetRobotState() 请求信息 + */ +message GetRobotStateReq { + string robot_name = 1; +} + +/** + * @brief rpc GetRobotState() 响应信息 + */ +message GetRobotStateRsp { + RobotState state = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc GetRobotStatus() 请求信息 + */ +message 
GetRobotStatusReq { + string robot_name = 1; +} + +/** + * @brief rpc GetRobotStatus() 响应信息 + */ +message GetRobotStatusRsp { + RobotStatus status = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc IsInPos() 请求信息 + */ +message IsInPosReq { + string robot_name = 1; +} + +/** + * @brief rpc IsInPos() 响应信息 + */ +message IsInPosRsp { + bool is_in_pos = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc IsOnLimit() 请求信息 + */ +message IsOnLimitReq { + string robot_name = 1; +} + +/** + * @brief rpc IsOnLimit() 响应信息 + */ +message IsOnLimitRsp { + bool is_on_limit = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc GetTcpPosition() 请求信息 + */ +message GetTcpPositionReq { + string robot_name = 1; +} + +/** + * @brief rpc GetTcpPosition() 响应信息 + */ +message GetTcpPositionRsp { + SE3RpyPose pose = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc KineForward() 请求信息 + */ +message KineForwardReq { + string robot_name = 1; + JointPos pos = 2; +} + +/** + * @brief rpc KineForward() 响应信息 + */ +message KineForwardRsp { + SE3RpyPose pose = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc KineInverse() 请求信息 + */ +message KineInverseReq { + string robot_name = 1; + JointPos ref_pos = 2; + SE3RpyPose pose = 3; +} + +/** + * @brief rpc KineInverse() 响应信息 + */ +message KineInverseRsp { + JointPos pos = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc ClearError() 请求信息 + */ +message ClearErrorReq { + string robot_name = 1; +} + +/** + * @brief rpc ClearError() 响应信息 + */ +message ClearErrorRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc GetLastError() 请求信息 + */ +message GetLastErrorReq { + string robot_name = 1; +} + +/** + * @brief rpc GetLastError() 响应信息 + */ +message GetLastErrorRsp { + ErrorCode robot_error = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc IsInCollision() 请求信息 + */ +message IsInCollisionReq { + string 
robot_name = 1; +} + +/** + * @brief rpc IsInCollision() 响应信息 + */ +message IsInCollisionRsp { + bool is_in_collision = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc CollisionRecover() 请求信息 + */ +message CollisionRecoverReq { + string robot_name = 1; +} + +/** + * @brief rpc CollisionRecover() 响应信息 + */ +message CollisionRecoverRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc GetCollisionLevel() 请求信息 + */ +message GetCollisionLevelReq { + string robot_name = 1; +} + +/** + * @brief rpc GetCollisionLevel() 响应信息 + */ +message GetCollisionLevelRsp { + int32 level = 1; + int32 errcode = 2; + string errmsg = 3; +} + +/** + * @brief rpc SetCollisionLevel() 请求信息 + */ +message SetCollisionLevelReq { + string robot_name = 1; + int32 level = 2; +} + +/** + * @brief rpc SetCollisionLevel() 响应信息 + */ +message SetCollisionLevelRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc EnableServoControl() 请求信息 + */ +message EnableServoControlReq { + string robot_name = 1; + bool enable = 2; +} + +/** + * @brief rpc EnableServoControl() 响应信息 + */ +message EnableServoControlRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc EnableTorqueControl() 请求信息 + */ +message EnableTorqueControlReq { + string robot_name = 1; + bool enable = 2; + int32 period = 3; +} + +/** + * @brief rpc EnableTorqueControl() 响应信息 + */ +message EnableTorqueControlRsp { + int32 errcode = 1; + string errmsg = 2; +} + +// InstallationAngle +/** + * @brief rpc SetInstallationAngle() 请求信息 + */ +message SetInstallationAngleReq { + string robot_name = 1; + double angleX = 2; + double angleZ = 3; +} + +/** + * @brief rpc SetInstallationAngle() 响应信息 + */ +message SetInstallationAngleRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc GetInstallationAngle() 请求信息 + */ +message GetInstallationAngleReq { + string robot_name = 1; +} + +/** + * @brief rpc GetInstallationAngle() 响应信息 + */ +message GetInstallationAngleRsp 
{ + Quaternion quat = 1; + Rpy rpy = 2; + int32 errcode = 3; + string errmsg = 4; +} + +/** + * @brief rpc EnableAdmittanceCtrl() 请求信息 + */ +message EnableAdmittanceCtrlReq { + string robot_name = 1; + bool enable_flag = 2; +} + +/** + * @brief rpc EnableAdmittanceCtrl() 响应信息 + */ +message EnableAdmittanceCtrlRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc SetFtCtrlFrame() 请求信息 + */ +message SetFtCtrlFrameReq { + string robot_name = 1; + int32 ftFrame = 2; +} + +/** + * @brief rpc SetFtCtrlFrame() 响应信息 + */ +message SetFtCtrlFrameRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc DisableForceControl() 请求信息 + */ +message DisableForceControlReq { + string robot_name = 1; +} + +/** + * @brief rpc DisableForceControl() 响应信息 + */ +message DisableForceControlRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc SetCompliantType() 请求信息 + */ +message SetCompliantTypeReq { + string robot_name = 1; + int32 sensor_compensation = 2; + int32 compliance_type = 3; +} + +/** + * @brief rpc SetCompliantType() 响应信息 + */ +message SetCompliantTypeRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc SetTorqueSensorMode() 请求信息 + */ +message SetTorqueSensorModeReq { + string robot_name = 1; + int32 sensor_mode = 2; +} + +/** + * @brief rpc SetTorqueSensorMode() 响应信息 + */ +message SetTorqueSensorModeRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief rpc SetTorqueSensorMode() 请求信息 + */ +message SetAdmitCtrlConfigReq { + string robot_name = 1; + int32 axis = 2; + int32 opt = 3; + double ftUser = 4; + double ftConstant = 5; + int32 ftNnormalTrack = 6; + double ftReboundFK = 7; +} + +/** + * @brief rpc SetAdmitCtrlConfig() 响应信息 + */ +message SetAdmitCtrlConfigRsp { + int32 errcode = 1; + string errmsg = 2; +} + +/** + * @brief 由 aimrt_hal 中的 jaka_module::JakaModule 模块提供的服务 + */ +service A2wArmControlService { + /** + * @brief 获取 jaka_module::JakaModule 收到的关节角度。 + * @param 
GetJointPositionReq + * @return GetJointPositionRsp + */ + rpc GetJointPosition(GetJointPositionReq) returns (GetJointPositionRsp); + /** + * @brief jaka_module::JakaModule 执行普通关节运动。 + * @param JointMoveReq + * @return JointMoveRsp + */ + rpc JointMove(JointMoveReq) returns (JointMoveRsp); + /** + * @brief jaka_module::JakaModule 执行末端线性运动。 + * @param LinearMoveReq + * @return LinearMoveRsp + */ + rpc LinearMove(LinearMoveReq) returns (LinearMoveRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂状态信息。 + * @param GetRobotStateReq + * @return GetRobotStateRsp + */ + rpc GetRobotState(GetRobotStateReq) returns (GetRobotStateRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂详细信息。 + * @param GetRobotStatusReq + * @return GetRobotStatusRsp + */ + rpc GetRobotStatus(GetRobotStatusReq) returns (GetRobotStatusRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂是否到达目标点的状态信息。 + * @param IsInPosReq + * @return IsInPosRsp + */ + rpc IsInPos(IsInPosReq) returns (IsInPosRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂是否限位的信息。 + * @param IsOnLimitReq + * @return IsOnLimitRsp + */ + rpc IsOnLimit(IsOnLimitReq) returns (IsOnLimitRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的末端笛卡尔位姿信息。 + * @param GetTcpPositionReq + * @return GetTcpPositionRsp + */ + rpc GetTcpPosition(GetTcpPositionReq) returns (GetTcpPositionRsp); + /** + * @brief 通过 jaka_module::JakaModule 进行运动学正解。 + * @param KineForwardReq + * @return KineForwardRsp + */ + rpc KineForward(KineForwardReq) returns (KineForwardRsp); + /** + * @brief 通过 jaka_module::JakaModule 进行运动学逆解。 + * @param KineInverseReq + * @return KineInverseRsp + */ + rpc KineInverse(KineInverseReq) returns (KineInverseRsp); + /** + * @brief 通过 jaka_module::JakaModule 清理错误。 + * @param ClearErrorReq + * @return ClearErrorRsp + */ + rpc ClearError(ClearErrorReq) returns (ClearErrorRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂错误信息。 + * @param GetLastErrorReq + * @return GetLastErrorRsp + */ + rpc 
GetLastError(GetLastErrorReq) returns (GetLastErrorRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂是否碰撞的状态信息。 + * @param IsInCollisionReq + * @return IsInCollisionRsp + */ + rpc IsInCollision(IsInCollisionReq) returns (IsInCollisionRsp); + /** + * @brief 通过 jaka_module::JakaModule 进行碰撞恢复。 + * @param CollisionRecoverReq + * @return CollisionRecoverRsp + */ + rpc CollisionRecover(CollisionRecoverReq) returns (CollisionRecoverRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂碰撞登记。 + * @param GetCollisionLevelReq + * @return GetCollisionLevelRsp + */ + rpc GetCollisionLevel(GetCollisionLevelReq) returns (GetCollisionLevelRsp); + /** + * @brief 通过 jaka_module::JakaModule 设置机械臂碰撞登记。 + * @param SetCollisionLevelReq + * @return SetCollisionLevelRsp + */ + rpc SetCollisionLevel(SetCollisionLevelReq) returns (SetCollisionLevelRsp); + /** + * @brief 通过 jaka_module::JakaModule 进行伺服使能控制。 + * @param EnableServoControlReq + * @return EnableServoControlRsp + */ + rpc EnableServoControl(EnableServoControlReq) returns (EnableServoControlRsp); + /** + * @brief 通过 jaka_module::JakaModule 进行电流环控制。 + * @param EnableTorqueControlReq + * @return EnableTorqueControlRsp + */ + rpc EnableTorqueControl(EnableTorqueControlReq) returns (EnableTorqueControlRsp); + /** + * @brief 通过 jaka_module::JakaModule 设置安装角度。 + * @param SetInstallationAngleReq + * @return SetInstallationAngleRsp + */ + rpc SetInstallationAngle(SetInstallationAngleReq) returns (SetInstallationAngleRsp); + /** + * @brief 获取 jaka_module::JakaModule 收到的机械臂安装角度。 + * @param GetInstallationAngleReq + * @return GetInstallationAngleRsp + */ + rpc GetInstallationAngle(GetInstallationAngleReq) returns (GetInstallationAngleRsp); + /** + * @brief 通过 jaka_module::JakaModule 设置力控拖拽使能。 + * @param EnableAdmittanceCtrlReq + * @return EnableAdmittanceCtrlRsp + */ + rpc EnableAdmittanceCtrl(EnableAdmittanceCtrlReq) returns (EnableAdmittanceCtrlRsp); + /** + * @brief 通过 jaka_module::JakaModule 设置导纳控制运动坐标系。 + * @param 
SetFtCtrlFrameReq + * @return SetFtCtrlFrameRsp + */ + rpc SetFtCtrlFrame(SetFtCtrlFrameReq) returns (SetFtCtrlFrameRsp); + /** + * @brief 通过 jaka_module::JakaModule 关闭力控。 + * @param DisableForceControlReq + * @return DisableForceControlRsp + */ + rpc DisableForceControl(DisableForceControlReq) returns (DisableForceControlRsp); + + /** + * @brief 通过 jaka_module::JakaModule 设置力控类型和传感器初始化状态。 + * @param SetCompliantTypeReq + * @return SetCompliantTypeRsp + */ + rpc SetCompliantType(SetCompliantTypeReq) returns (SetCompliantTypeRsp); + + /** + * @brief 通过 jaka_module::JakaModule 开启或关闭力矩传感器。 + * @param SetTorqueSensorModeReq + * @return SetTorqueSensorModeRsp + */ + rpc SetTorqueSensorMode(SetTorqueSensorModeReq) returns (SetTorqueSensorModeRsp); + + /** + * @brief 通过 jaka_module::JakaModule 设置柔顺控制参数。 + * @param SetAdmitCtrlConfigReq + * @return SetAdmitCtrlConfigRsp + */ + rpc SetAdmitCtrlConfig(SetAdmitCtrlConfigReq) returns (SetAdmitCtrlConfigRsp); +} diff --git a/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2.py new file mode 100644 index 0000000..896a8af --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/hal/jaka/jaka.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import vec3_pb2 as aimdk_dot_protocol_dot_common_dot_vec3__pb2 +from aimdk.protocol.common import quaternion_pb2 as aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +from aimdk.protocol.common import rpy_pb2 as aimdk_dot_protocol_dot_common_dot_rpy__pb2 +from aimdk.protocol.common import se3_pose_pb2 as aimdk_dot_protocol_dot_common_dot_se3__pose__pb2 +try: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_vec3__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.vec3_pb2 +try: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.quaternion_pb2 +try: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_rpy__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.rpy_pb2 + +from aimdk.protocol.common.vec3_pb2 import * +from aimdk.protocol.common.quaternion_pb2 import * +from aimdk.protocol.common.rpy_pb2 import * +from aimdk.protocol.common.se3_pose_pb2 import * + +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\"aimdk/protocol/hal/jaka/jaka.proto\x12\x0e\x61imdk.protocol\x1a aimdk/protocol/common/vec3.proto\x1a&aimdk/protocol/common/quaternion.proto\x1a\x1f\x61imdk/protocol/common/rpy.proto\x1a$aimdk/protocol/common/se3_pose.proto\"\x18\n\x08JointPos\x12\x0c\n\x04jPos\x18\x01 \x03(\x01\"?\n\x07PayLoad\x12\x0c\n\x04mass\x18\x01 \x01(\x01\x12&\n\x08\x63\x65ntroid\x18\x02 \x01(\x0b\x32\x14.aimdk.protocol.Vec3\"F\n\nRobotState\x12\x0f\n\x07\x65stoped\x18\x01 \x01(\x05\x12\x11\n\tpoweredOn\x18\x02 \x01(\x05\x12\x14\n\x0cservoEnabled\x18\x03 \x01(\x05\"x\n\x10JointMonitorData\x12\x13\n\x0binstCurrent\x18\x01 \x01(\x01\x12\x13\n\x0binstVoltage\x18\x02 \x01(\x01\x12\x17\n\x0finstTemperature\x18\x03 \x01(\x01\x12\x0f\n\x07instVel\x18\x04 \x01(\x01\x12\x10\n\x08instTorq\x18\x05 \x01(\x01\"\xd2\x01\n\x10RobotMonitorData\x12\x17\n\x0fscbMajorVersion\x18\x01 \x01(\x01\x12\x17\n\x0fscbMinorVersion\x18\x02 \x01(\x01\x12\x16\n\x0e\x63\x61\x62Temperature\x18\x03 \x01(\x01\x12\x19\n\x11robotAveragePower\x18\x04 \x01(\x01\x12\x1b\n\x13robotAverageCurrent\x18\x05 \x01(\x01\x12<\n\x12joint_monitor_data\x18\x06 \x03(\x0b\x32 .aimdk.protocol.JointMonitorData\"\xb4\x01\n\x16TorqSernsorMonitorData\x12\n\n\x02ip\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x12(\n\x07payLoad\x18\x03 \x01(\x0b\x32\x17.aimdk.protocol.PayLoad\x12\x0e\n\x06status\x18\x04 \x01(\x05\x12\x0f\n\x07\x65rrcode\x18\x05 \x01(\x05\x12\x11\n\tactTorque\x18\x06 \x03(\x01\x12\x0e\n\x06torque\x18\x07 \x03(\x01\x12\x12\n\nrealTorque\x18\x08 \x03(\x01\"\xa8\x03\n\x0bRobotStatus\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\r\n\x05inpos\x18\x02 \x01(\x05\x12\x12\n\npowered_on\x18\x03 \x01(\x05\x12\x0f\n\x07\x65nabled\x18\x04 \x01(\x05\x12\x11\n\trapidrate\x18\x05 \x01(\x01\x12\x17\n\x0fprotective_stop\x18\x06 \x01(\x05\x12\x16\n\x0e\x65mergency_stop\x18\x07 \x01(\x05\x12\x19\n\x11\x63\x61rtesiantran_pos\x18\x08 \x03(\x01\x12\x11\n\tjoint_pos\x18\t 
\x03(\x01\x12\x15\n\ron_soft_limit\x18\n \x01(\r\x12\x13\n\x0b\x63ur_user_id\x18\x0b \x01(\r\x12\x13\n\x0b\x64rag_status\x18\x0c \x01(\x05\x12<\n\x12robot_monitor_data\x18\r \x01(\x0b\x32 .aimdk.protocol.RobotMonitorData\x12H\n\x18torq_sensor_monitor_data\x18\x0e \x01(\x0b\x32&.aimdk.protocol.TorqSernsorMonitorData\x12\x19\n\x11is_socket_connect\x18\x0f \x01(\x05\"\'\n\x0cOptionalCond\x12\x17\n\x0f\x65xecutingLineId\x18\x01 \x01(\x05\"*\n\tErrorCode\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\")\n\x13GetJointPositionReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"]\n\x13GetJointPositionRsp\x12%\n\x03pos\x18\x01 \x01(\x0b\x32\x18.aimdk.protocol.JointPos\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"[\n\x0cJointMoveExt\x12\x0b\n\x03\x61\x63\x63\x18\x01 \x01(\x01\x12\x0b\n\x03tol\x18\x02 \x01(\x01\x12\x31\n\x0boption_cond\x18\x03 \x01(\x0b\x32\x1c.aimdk.protocol.OptionalCond\"\xe0\x01\n\x0cJointMoveReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12%\n\x03pos\x18\x02 \x01(\x0b\x32\x18.aimdk.protocol.JointPos\x12&\n\x04mode\x18\x03 \x01(\x0e\x32\x18.aimdk.protocol.MoveMode\x12\x10\n\x08is_block\x18\x04 \x01(\x08\x12\x16\n\x0e\x64istance_frame\x18\x05 \x01(\x01\x12)\n\x03\x65xt\x18\x06 \x01(\x0b\x32\x1c.aimdk.protocol.JointMoveExt\x12\x18\n\x10\x65\x65_interpolation\x18\x07 \x01(\x08\"/\n\x0cJointMoveRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"\\\n\rLinearMoveExt\x12\x0b\n\x03\x61\x63\x63\x18\x01 \x01(\x01\x12\x0b\n\x03tol\x18\x02 \x01(\x01\x12\x31\n\x0boption_cond\x18\x03 \x01(\x0b\x32\x1c.aimdk.protocol.OptionalCond\"\xe5\x01\n\rLinearMoveReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12(\n\x04pose\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12&\n\x04mode\x18\x03 \x01(\x0e\x32\x18.aimdk.protocol.MoveMode\x12\x10\n\x08is_block\x18\x04 \x01(\x08\x12\x16\n\x0e\x64istance_frame\x18\x05 \x01(\x01\x12*\n\x03\x65xt\x18\x06 
\x01(\x0b\x32\x1d.aimdk.protocol.LinearMoveExt\x12\x18\n\x10\x65\x65_interpolation\x18\x07 \x01(\x08\"0\n\rLinearMoveRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"&\n\x10GetRobotStateReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"^\n\x10GetRobotStateRsp\x12)\n\x05state\x18\x01 \x01(\x0b\x32\x1a.aimdk.protocol.RobotState\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"\'\n\x11GetRobotStatusReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"a\n\x11GetRobotStatusRsp\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.aimdk.protocol.RobotStatus\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\" \n\nIsInPosReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"@\n\nIsInPosRsp\x12\x11\n\tis_in_pos\x18\x01 \x01(\x08\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"\"\n\x0cIsOnLimitReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"D\n\x0cIsOnLimitRsp\x12\x13\n\x0bis_on_limit\x18\x01 \x01(\x08\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"\'\n\x11GetTcpPositionReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"^\n\x11GetTcpPositionRsp\x12(\n\x04pose\x18\x01 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"K\n\x0eKineForwardReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12%\n\x03pos\x18\x02 \x01(\x0b\x32\x18.aimdk.protocol.JointPos\"[\n\x0eKineForwardRsp\x12(\n\x04pose\x18\x01 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"y\n\x0eKineInverseReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12)\n\x07ref_pos\x18\x02 \x01(\x0b\x32\x18.aimdk.protocol.JointPos\x12(\n\x04pose\x18\x03 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\"X\n\x0eKineInverseRsp\x12%\n\x03pos\x18\x01 \x01(\x0b\x32\x18.aimdk.protocol.JointPos\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 
\x01(\t\"#\n\rClearErrorReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"0\n\rClearErrorRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"%\n\x0fGetLastErrorReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"b\n\x0fGetLastErrorRsp\x12.\n\x0brobot_error\x18\x01 \x01(\x0b\x32\x19.aimdk.protocol.ErrorCode\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"&\n\x10IsInCollisionReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"L\n\x10IsInCollisionRsp\x12\x17\n\x0fis_in_collision\x18\x01 \x01(\x08\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\")\n\x13\x43ollisionRecoverReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"6\n\x13\x43ollisionRecoverRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"*\n\x14GetCollisionLevelReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"F\n\x14GetCollisionLevelRsp\x12\r\n\x05level\x18\x01 \x01(\x05\x12\x0f\n\x07\x65rrcode\x18\x02 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x03 \x01(\t\"9\n\x14SetCollisionLevelReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\r\n\x05level\x18\x02 \x01(\x05\"7\n\x14SetCollisionLevelRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\";\n\x15\x45nableServoControlReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x0e\n\x06\x65nable\x18\x02 \x01(\x08\"8\n\x15\x45nableServoControlRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"L\n\x16\x45nableTorqueControlReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x0e\n\x06\x65nable\x18\x02 \x01(\x08\x12\x0e\n\x06period\x18\x03 \x01(\x05\"9\n\x16\x45nableTorqueControlRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"M\n\x17SetInstallationAngleReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x0e\n\x06\x61ngleX\x18\x02 \x01(\x01\x12\x0e\n\x06\x61ngleZ\x18\x03 \x01(\x01\":\n\x17SetInstallationAngleRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 
\x01(\t\"-\n\x17GetInstallationAngleReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"\x86\x01\n\x17GetInstallationAngleRsp\x12(\n\x04quat\x18\x01 \x01(\x0b\x32\x1a.aimdk.protocol.Quaternion\x12 \n\x03rpy\x18\x02 \x01(\x0b\x32\x13.aimdk.protocol.Rpy\x12\x0f\n\x07\x65rrcode\x18\x03 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x04 \x01(\t\"B\n\x17\x45nableAdmittanceCtrlReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x13\n\x0b\x65nable_flag\x18\x02 \x01(\x08\":\n\x17\x45nableAdmittanceCtrlRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"8\n\x11SetFtCtrlFrameReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x0f\n\x07\x66tFrame\x18\x02 \x01(\x05\"4\n\x11SetFtCtrlFrameRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\",\n\x16\x44isableForceControlReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\"9\n\x16\x44isableForceControlRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"_\n\x13SetCompliantTypeReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x1b\n\x13sensor_compensation\x18\x02 \x01(\x05\x12\x17\n\x0f\x63ompliance_type\x18\x03 \x01(\x05\"6\n\x13SetCompliantTypeRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"A\n\x16SetTorqueSensorModeReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x13\n\x0bsensor_mode\x18\x02 \x01(\x05\"9\n\x16SetTorqueSensorModeRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 \x01(\t\"\x97\x01\n\x15SetAdmitCtrlConfigReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x0c\n\x04\x61xis\x18\x02 \x01(\x05\x12\x0b\n\x03opt\x18\x03 \x01(\x05\x12\x0e\n\x06\x66tUser\x18\x04 \x01(\x01\x12\x12\n\nftConstant\x18\x05 \x01(\x01\x12\x16\n\x0e\x66tNnormalTrack\x18\x06 \x01(\x05\x12\x13\n\x0b\x66tReboundFK\x18\x07 \x01(\x01\"8\n\x15SetAdmitCtrlConfigRsp\x12\x0f\n\x07\x65rrcode\x18\x01 \x01(\x05\x12\x0e\n\x06\x65rrmsg\x18\x02 
\x01(\t*F\n\x08MoveMode\x12\x10\n\x0cMoveMode_ABS\x10\x00\x12\x11\n\rMoveMode_INCR\x10\x01\x12\x15\n\x11MoveMode_CONTINUE\x10\x02\x32\xbc\x12\n\x14\x41\x32wArmControlService\x12\\\n\x10GetJointPosition\x12#.aimdk.protocol.GetJointPositionReq\x1a#.aimdk.protocol.GetJointPositionRsp\x12G\n\tJointMove\x12\x1c.aimdk.protocol.JointMoveReq\x1a\x1c.aimdk.protocol.JointMoveRsp\x12J\n\nLinearMove\x12\x1d.aimdk.protocol.LinearMoveReq\x1a\x1d.aimdk.protocol.LinearMoveRsp\x12S\n\rGetRobotState\x12 .aimdk.protocol.GetRobotStateReq\x1a .aimdk.protocol.GetRobotStateRsp\x12V\n\x0eGetRobotStatus\x12!.aimdk.protocol.GetRobotStatusReq\x1a!.aimdk.protocol.GetRobotStatusRsp\x12\x41\n\x07IsInPos\x12\x1a.aimdk.protocol.IsInPosReq\x1a\x1a.aimdk.protocol.IsInPosRsp\x12G\n\tIsOnLimit\x12\x1c.aimdk.protocol.IsOnLimitReq\x1a\x1c.aimdk.protocol.IsOnLimitRsp\x12V\n\x0eGetTcpPosition\x12!.aimdk.protocol.GetTcpPositionReq\x1a!.aimdk.protocol.GetTcpPositionRsp\x12M\n\x0bKineForward\x12\x1e.aimdk.protocol.KineForwardReq\x1a\x1e.aimdk.protocol.KineForwardRsp\x12M\n\x0bKineInverse\x12\x1e.aimdk.protocol.KineInverseReq\x1a\x1e.aimdk.protocol.KineInverseRsp\x12J\n\nClearError\x12\x1d.aimdk.protocol.ClearErrorReq\x1a\x1d.aimdk.protocol.ClearErrorRsp\x12P\n\x0cGetLastError\x12\x1f.aimdk.protocol.GetLastErrorReq\x1a\x1f.aimdk.protocol.GetLastErrorRsp\x12S\n\rIsInCollision\x12 .aimdk.protocol.IsInCollisionReq\x1a 
.aimdk.protocol.IsInCollisionRsp\x12\\\n\x10\x43ollisionRecover\x12#.aimdk.protocol.CollisionRecoverReq\x1a#.aimdk.protocol.CollisionRecoverRsp\x12_\n\x11GetCollisionLevel\x12$.aimdk.protocol.GetCollisionLevelReq\x1a$.aimdk.protocol.GetCollisionLevelRsp\x12_\n\x11SetCollisionLevel\x12$.aimdk.protocol.SetCollisionLevelReq\x1a$.aimdk.protocol.SetCollisionLevelRsp\x12\x62\n\x12\x45nableServoControl\x12%.aimdk.protocol.EnableServoControlReq\x1a%.aimdk.protocol.EnableServoControlRsp\x12\x65\n\x13\x45nableTorqueControl\x12&.aimdk.protocol.EnableTorqueControlReq\x1a&.aimdk.protocol.EnableTorqueControlRsp\x12h\n\x14SetInstallationAngle\x12\'.aimdk.protocol.SetInstallationAngleReq\x1a\'.aimdk.protocol.SetInstallationAngleRsp\x12h\n\x14GetInstallationAngle\x12\'.aimdk.protocol.GetInstallationAngleReq\x1a\'.aimdk.protocol.GetInstallationAngleRsp\x12h\n\x14\x45nableAdmittanceCtrl\x12\'.aimdk.protocol.EnableAdmittanceCtrlReq\x1a\'.aimdk.protocol.EnableAdmittanceCtrlRsp\x12V\n\x0eSetFtCtrlFrame\x12!.aimdk.protocol.SetFtCtrlFrameReq\x1a!.aimdk.protocol.SetFtCtrlFrameRsp\x12\x65\n\x13\x44isableForceControl\x12&.aimdk.protocol.DisableForceControlReq\x1a&.aimdk.protocol.DisableForceControlRsp\x12\\\n\x10SetCompliantType\x12#.aimdk.protocol.SetCompliantTypeReq\x1a#.aimdk.protocol.SetCompliantTypeRsp\x12\x65\n\x13SetTorqueSensorMode\x12&.aimdk.protocol.SetTorqueSensorModeReq\x1a&.aimdk.protocol.SetTorqueSensorModeRsp\x12\x62\n\x12SetAdmitCtrlConfig\x12%.aimdk.protocol.SetAdmitCtrlConfigReq\x1a%.aimdk.protocol.SetAdmitCtrlConfigRspP\x00P\x01P\x02P\x03\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.jaka.jaka_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_MOVEMODE']._serialized_start=5367 + _globals['_MOVEMODE']._serialized_end=5437 + _globals['_JOINTPOS']._serialized_start=199 + 
_globals['_JOINTPOS']._serialized_end=223 + _globals['_PAYLOAD']._serialized_start=225 + _globals['_PAYLOAD']._serialized_end=288 + _globals['_ROBOTSTATE']._serialized_start=290 + _globals['_ROBOTSTATE']._serialized_end=360 + _globals['_JOINTMONITORDATA']._serialized_start=362 + _globals['_JOINTMONITORDATA']._serialized_end=482 + _globals['_ROBOTMONITORDATA']._serialized_start=485 + _globals['_ROBOTMONITORDATA']._serialized_end=695 + _globals['_TORQSERNSORMONITORDATA']._serialized_start=698 + _globals['_TORQSERNSORMONITORDATA']._serialized_end=878 + _globals['_ROBOTSTATUS']._serialized_start=881 + _globals['_ROBOTSTATUS']._serialized_end=1305 + _globals['_OPTIONALCOND']._serialized_start=1307 + _globals['_OPTIONALCOND']._serialized_end=1346 + _globals['_ERRORCODE']._serialized_start=1348 + _globals['_ERRORCODE']._serialized_end=1390 + _globals['_GETJOINTPOSITIONREQ']._serialized_start=1392 + _globals['_GETJOINTPOSITIONREQ']._serialized_end=1433 + _globals['_GETJOINTPOSITIONRSP']._serialized_start=1435 + _globals['_GETJOINTPOSITIONRSP']._serialized_end=1528 + _globals['_JOINTMOVEEXT']._serialized_start=1530 + _globals['_JOINTMOVEEXT']._serialized_end=1621 + _globals['_JOINTMOVEREQ']._serialized_start=1624 + _globals['_JOINTMOVEREQ']._serialized_end=1848 + _globals['_JOINTMOVERSP']._serialized_start=1850 + _globals['_JOINTMOVERSP']._serialized_end=1897 + _globals['_LINEARMOVEEXT']._serialized_start=1899 + _globals['_LINEARMOVEEXT']._serialized_end=1991 + _globals['_LINEARMOVEREQ']._serialized_start=1994 + _globals['_LINEARMOVEREQ']._serialized_end=2223 + _globals['_LINEARMOVERSP']._serialized_start=2225 + _globals['_LINEARMOVERSP']._serialized_end=2273 + _globals['_GETROBOTSTATEREQ']._serialized_start=2275 + _globals['_GETROBOTSTATEREQ']._serialized_end=2313 + _globals['_GETROBOTSTATERSP']._serialized_start=2315 + _globals['_GETROBOTSTATERSP']._serialized_end=2409 + _globals['_GETROBOTSTATUSREQ']._serialized_start=2411 + 
_globals['_GETROBOTSTATUSREQ']._serialized_end=2450 + _globals['_GETROBOTSTATUSRSP']._serialized_start=2452 + _globals['_GETROBOTSTATUSRSP']._serialized_end=2549 + _globals['_ISINPOSREQ']._serialized_start=2551 + _globals['_ISINPOSREQ']._serialized_end=2583 + _globals['_ISINPOSRSP']._serialized_start=2585 + _globals['_ISINPOSRSP']._serialized_end=2649 + _globals['_ISONLIMITREQ']._serialized_start=2651 + _globals['_ISONLIMITREQ']._serialized_end=2685 + _globals['_ISONLIMITRSP']._serialized_start=2687 + _globals['_ISONLIMITRSP']._serialized_end=2755 + _globals['_GETTCPPOSITIONREQ']._serialized_start=2757 + _globals['_GETTCPPOSITIONREQ']._serialized_end=2796 + _globals['_GETTCPPOSITIONRSP']._serialized_start=2798 + _globals['_GETTCPPOSITIONRSP']._serialized_end=2892 + _globals['_KINEFORWARDREQ']._serialized_start=2894 + _globals['_KINEFORWARDREQ']._serialized_end=2969 + _globals['_KINEFORWARDRSP']._serialized_start=2971 + _globals['_KINEFORWARDRSP']._serialized_end=3062 + _globals['_KINEINVERSEREQ']._serialized_start=3064 + _globals['_KINEINVERSEREQ']._serialized_end=3185 + _globals['_KINEINVERSERSP']._serialized_start=3187 + _globals['_KINEINVERSERSP']._serialized_end=3275 + _globals['_CLEARERRORREQ']._serialized_start=3277 + _globals['_CLEARERRORREQ']._serialized_end=3312 + _globals['_CLEARERRORRSP']._serialized_start=3314 + _globals['_CLEARERRORRSP']._serialized_end=3362 + _globals['_GETLASTERRORREQ']._serialized_start=3364 + _globals['_GETLASTERRORREQ']._serialized_end=3401 + _globals['_GETLASTERRORRSP']._serialized_start=3403 + _globals['_GETLASTERRORRSP']._serialized_end=3501 + _globals['_ISINCOLLISIONREQ']._serialized_start=3503 + _globals['_ISINCOLLISIONREQ']._serialized_end=3541 + _globals['_ISINCOLLISIONRSP']._serialized_start=3543 + _globals['_ISINCOLLISIONRSP']._serialized_end=3619 + _globals['_COLLISIONRECOVERREQ']._serialized_start=3621 + _globals['_COLLISIONRECOVERREQ']._serialized_end=3662 + _globals['_COLLISIONRECOVERRSP']._serialized_start=3664 + 
_globals['_COLLISIONRECOVERRSP']._serialized_end=3718 + _globals['_GETCOLLISIONLEVELREQ']._serialized_start=3720 + _globals['_GETCOLLISIONLEVELREQ']._serialized_end=3762 + _globals['_GETCOLLISIONLEVELRSP']._serialized_start=3764 + _globals['_GETCOLLISIONLEVELRSP']._serialized_end=3834 + _globals['_SETCOLLISIONLEVELREQ']._serialized_start=3836 + _globals['_SETCOLLISIONLEVELREQ']._serialized_end=3893 + _globals['_SETCOLLISIONLEVELRSP']._serialized_start=3895 + _globals['_SETCOLLISIONLEVELRSP']._serialized_end=3950 + _globals['_ENABLESERVOCONTROLREQ']._serialized_start=3952 + _globals['_ENABLESERVOCONTROLREQ']._serialized_end=4011 + _globals['_ENABLESERVOCONTROLRSP']._serialized_start=4013 + _globals['_ENABLESERVOCONTROLRSP']._serialized_end=4069 + _globals['_ENABLETORQUECONTROLREQ']._serialized_start=4071 + _globals['_ENABLETORQUECONTROLREQ']._serialized_end=4147 + _globals['_ENABLETORQUECONTROLRSP']._serialized_start=4149 + _globals['_ENABLETORQUECONTROLRSP']._serialized_end=4206 + _globals['_SETINSTALLATIONANGLEREQ']._serialized_start=4208 + _globals['_SETINSTALLATIONANGLEREQ']._serialized_end=4285 + _globals['_SETINSTALLATIONANGLERSP']._serialized_start=4287 + _globals['_SETINSTALLATIONANGLERSP']._serialized_end=4345 + _globals['_GETINSTALLATIONANGLEREQ']._serialized_start=4347 + _globals['_GETINSTALLATIONANGLEREQ']._serialized_end=4392 + _globals['_GETINSTALLATIONANGLERSP']._serialized_start=4395 + _globals['_GETINSTALLATIONANGLERSP']._serialized_end=4529 + _globals['_ENABLEADMITTANCECTRLREQ']._serialized_start=4531 + _globals['_ENABLEADMITTANCECTRLREQ']._serialized_end=4597 + _globals['_ENABLEADMITTANCECTRLRSP']._serialized_start=4599 + _globals['_ENABLEADMITTANCECTRLRSP']._serialized_end=4657 + _globals['_SETFTCTRLFRAMEREQ']._serialized_start=4659 + _globals['_SETFTCTRLFRAMEREQ']._serialized_end=4715 + _globals['_SETFTCTRLFRAMERSP']._serialized_start=4717 + _globals['_SETFTCTRLFRAMERSP']._serialized_end=4769 + 
_globals['_DISABLEFORCECONTROLREQ']._serialized_start=4771 + _globals['_DISABLEFORCECONTROLREQ']._serialized_end=4815 + _globals['_DISABLEFORCECONTROLRSP']._serialized_start=4817 + _globals['_DISABLEFORCECONTROLRSP']._serialized_end=4874 + _globals['_SETCOMPLIANTTYPEREQ']._serialized_start=4876 + _globals['_SETCOMPLIANTTYPEREQ']._serialized_end=4971 + _globals['_SETCOMPLIANTTYPERSP']._serialized_start=4973 + _globals['_SETCOMPLIANTTYPERSP']._serialized_end=5027 + _globals['_SETTORQUESENSORMODEREQ']._serialized_start=5029 + _globals['_SETTORQUESENSORMODEREQ']._serialized_end=5094 + _globals['_SETTORQUESENSORMODERSP']._serialized_start=5096 + _globals['_SETTORQUESENSORMODERSP']._serialized_end=5153 + _globals['_SETADMITCTRLCONFIGREQ']._serialized_start=5156 + _globals['_SETADMITCTRLCONFIGREQ']._serialized_end=5307 + _globals['_SETADMITCTRLCONFIGRSP']._serialized_start=5309 + _globals['_SETADMITCTRLCONFIGRSP']._serialized_end=5365 + _globals['_A2WARMCONTROLSERVICE']._serialized_start=5440 + _globals['_A2WARMCONTROLSERVICE']._serialized_end=7804 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2_grpc.py new file mode 100644 index 0000000..5d3f17c --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/jaka/jaka_pb2_grpc.py @@ -0,0 +1,1001 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from aimdk.protocol.hal.jaka import jaka_pb2 as aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2 + + +class A2wArmControlServiceStub(object): + """* + @brief 由 aimrt_hal 中的 jaka_module::JakaModule 模块提供的服务 + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetJointPosition = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetJointPosition', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetJointPositionReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetJointPositionRsp.FromString, + ) + self.JointMove = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/JointMove', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.JointMoveReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.JointMoveRsp.FromString, + ) + self.LinearMove = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/LinearMove', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.LinearMoveReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.LinearMoveRsp.FromString, + ) + self.GetRobotState = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetRobotState', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStateReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStateRsp.FromString, + ) + self.GetRobotStatus = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetRobotStatus', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStatusReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStatusRsp.FromString, + ) + self.IsInPos = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/IsInPos', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInPosReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInPosRsp.FromString, + ) + self.IsOnLimit = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/IsOnLimit', + 
request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsOnLimitReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsOnLimitRsp.FromString, + ) + self.GetTcpPosition = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetTcpPosition', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetTcpPositionReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetTcpPositionRsp.FromString, + ) + self.KineForward = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/KineForward', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineForwardReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineForwardRsp.FromString, + ) + self.KineInverse = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/KineInverse', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineInverseReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineInverseRsp.FromString, + ) + self.ClearError = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/ClearError', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.ClearErrorReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.ClearErrorRsp.FromString, + ) + self.GetLastError = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetLastError', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetLastErrorReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetLastErrorRsp.FromString, + ) + self.IsInCollision = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/IsInCollision', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInCollisionReq.SerializeToString, + 
response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInCollisionRsp.FromString, + ) + self.CollisionRecover = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/CollisionRecover', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.CollisionRecoverReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.CollisionRecoverRsp.FromString, + ) + self.GetCollisionLevel = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetCollisionLevel', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetCollisionLevelReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetCollisionLevelRsp.FromString, + ) + self.SetCollisionLevel = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/SetCollisionLevel', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCollisionLevelReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCollisionLevelRsp.FromString, + ) + self.EnableServoControl = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/EnableServoControl', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableServoControlReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableServoControlRsp.FromString, + ) + self.EnableTorqueControl = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/EnableTorqueControl', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableTorqueControlReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableTorqueControlRsp.FromString, + ) + self.SetInstallationAngle = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/SetInstallationAngle', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetInstallationAngleReq.SerializeToString, + 
response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetInstallationAngleRsp.FromString, + ) + self.GetInstallationAngle = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/GetInstallationAngle', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetInstallationAngleReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetInstallationAngleRsp.FromString, + ) + self.EnableAdmittanceCtrl = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/EnableAdmittanceCtrl', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableAdmittanceCtrlReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableAdmittanceCtrlRsp.FromString, + ) + self.SetFtCtrlFrame = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/SetFtCtrlFrame', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetFtCtrlFrameReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetFtCtrlFrameRsp.FromString, + ) + self.DisableForceControl = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/DisableForceControl', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.DisableForceControlReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.DisableForceControlRsp.FromString, + ) + self.SetCompliantType = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/SetCompliantType', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCompliantTypeReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCompliantTypeRsp.FromString, + ) + self.SetTorqueSensorMode = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/SetTorqueSensorMode', + 
request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetTorqueSensorModeReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetTorqueSensorModeRsp.FromString, + ) + self.SetAdmitCtrlConfig = channel.unary_unary( + '/aimdk.protocol.A2wArmControlService/SetAdmitCtrlConfig', + request_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetAdmitCtrlConfigReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetAdmitCtrlConfigRsp.FromString, + ) + + +class A2wArmControlServiceServicer(object): + """* + @brief 由 aimrt_hal 中的 jaka_module::JakaModule 模块提供的服务 + """ + + def GetJointPosition(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的关节角度。 + @param GetJointPositionReq + @return GetJointPositionRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def JointMove(self, request, context): + """* + @brief jaka_module::JakaModule 执行普通关节运动。 + @param JointMoveReq + @return JointMoveRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LinearMove(self, request, context): + """* + @brief jaka_module::JakaModule 执行末端线性运动。 + @param LinearMoveReq + @return LinearMoveRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetRobotState(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂状态信息。 + @param GetRobotStateReq + @return GetRobotStateRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetRobotStatus(self, request, context): + """* + @brief 获取 
jaka_module::JakaModule 收到的机械臂详细信息。 + @param GetRobotStatusReq + @return GetRobotStatusRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def IsInPos(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂是否到达目标点的状态信息。 + @param IsInPosReq + @return IsInPosRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def IsOnLimit(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂是否限位的信息。 + @param IsOnLimitReq + @return IsOnLimitRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTcpPosition(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的末端笛卡尔位姿信息。 + @param GetTcpPositionReq + @return GetTcpPositionRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def KineForward(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 进行运动学正解。 + @param KineForwardReq + @return KineForwardRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def KineInverse(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 进行运动学逆解。 + @param KineInverseReq + @return KineInverseRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ClearError(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 清理错误。 + @param ClearErrorReq + @return ClearErrorRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + 
context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetLastError(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂错误信息。 + @param GetLastErrorReq + @return GetLastErrorRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def IsInCollision(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂是否碰撞的状态信息。 + @param IsInCollisionReq + @return IsInCollisionRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CollisionRecover(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 进行碰撞恢复。 + @param CollisionRecoverReq + @return CollisionRecoverRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetCollisionLevel(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂碰撞登记。 + @param GetCollisionLevelReq + @return GetCollisionLevelRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetCollisionLevel(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 设置机械臂碰撞登记。 + @param SetCollisionLevelReq + @return SetCollisionLevelRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def EnableServoControl(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 进行伺服使能控制。 + @param EnableServoControlReq + @return EnableServoControlRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise 
NotImplementedError('Method not implemented!') + + def EnableTorqueControl(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 进行电流环控制。 + @param EnableTorqueControlReq + @return EnableTorqueControlRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetInstallationAngle(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 设置安装角度。 + @param SetInstallationAngleReq + @return SetInstallationAngleRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetInstallationAngle(self, request, context): + """* + @brief 获取 jaka_module::JakaModule 收到的机械臂安装角度。 + @param GetInstallationAngleReq + @return GetInstallationAngleRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def EnableAdmittanceCtrl(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 设置力控拖拽使能。 + @param EnableAdmittanceCtrlReq + @return EnableAdmittanceCtrlRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetFtCtrlFrame(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 设置导纳控制运动坐标系。 + @param SetFtCtrlFrameReq + @return SetFtCtrlFrameRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DisableForceControl(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 关闭力控。 + @param DisableForceControlReq + @return DisableForceControlRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise 
NotImplementedError('Method not implemented!') + + def SetCompliantType(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 设置力控类型和传感器初始化状态。 + @param SetCompliantTypeReq + @return SetCompliantTypeRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetTorqueSensorMode(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 开启或关闭力矩传感器。 + @param SetTorqueSensorModeReq + @return SetTorqueSensorModeRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetAdmitCtrlConfig(self, request, context): + """* + @brief 通过 jaka_module::JakaModule 设置柔顺控制参数。 + @param SetAdmitCtrlConfigReq + @return SetAdmitCtrlConfigRsp + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_A2wArmControlServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetJointPosition': grpc.unary_unary_rpc_method_handler( + servicer.GetJointPosition, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetJointPositionReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetJointPositionRsp.SerializeToString, + ), + 'JointMove': grpc.unary_unary_rpc_method_handler( + servicer.JointMove, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.JointMoveReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.JointMoveRsp.SerializeToString, + ), + 'LinearMove': grpc.unary_unary_rpc_method_handler( + servicer.LinearMove, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.LinearMoveReq.FromString, + 
response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.LinearMoveRsp.SerializeToString, + ), + 'GetRobotState': grpc.unary_unary_rpc_method_handler( + servicer.GetRobotState, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStateReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStateRsp.SerializeToString, + ), + 'GetRobotStatus': grpc.unary_unary_rpc_method_handler( + servicer.GetRobotStatus, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStatusReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStatusRsp.SerializeToString, + ), + 'IsInPos': grpc.unary_unary_rpc_method_handler( + servicer.IsInPos, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInPosReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInPosRsp.SerializeToString, + ), + 'IsOnLimit': grpc.unary_unary_rpc_method_handler( + servicer.IsOnLimit, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsOnLimitReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsOnLimitRsp.SerializeToString, + ), + 'GetTcpPosition': grpc.unary_unary_rpc_method_handler( + servicer.GetTcpPosition, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetTcpPositionReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetTcpPositionRsp.SerializeToString, + ), + 'KineForward': grpc.unary_unary_rpc_method_handler( + servicer.KineForward, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineForwardReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineForwardRsp.SerializeToString, + ), + 'KineInverse': grpc.unary_unary_rpc_method_handler( + servicer.KineInverse, + 
request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineInverseReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineInverseRsp.SerializeToString, + ), + 'ClearError': grpc.unary_unary_rpc_method_handler( + servicer.ClearError, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.ClearErrorReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.ClearErrorRsp.SerializeToString, + ), + 'GetLastError': grpc.unary_unary_rpc_method_handler( + servicer.GetLastError, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetLastErrorReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetLastErrorRsp.SerializeToString, + ), + 'IsInCollision': grpc.unary_unary_rpc_method_handler( + servicer.IsInCollision, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInCollisionReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInCollisionRsp.SerializeToString, + ), + 'CollisionRecover': grpc.unary_unary_rpc_method_handler( + servicer.CollisionRecover, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.CollisionRecoverReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.CollisionRecoverRsp.SerializeToString, + ), + 'GetCollisionLevel': grpc.unary_unary_rpc_method_handler( + servicer.GetCollisionLevel, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetCollisionLevelReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetCollisionLevelRsp.SerializeToString, + ), + 'SetCollisionLevel': grpc.unary_unary_rpc_method_handler( + servicer.SetCollisionLevel, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCollisionLevelReq.FromString, + 
response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCollisionLevelRsp.SerializeToString, + ), + 'EnableServoControl': grpc.unary_unary_rpc_method_handler( + servicer.EnableServoControl, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableServoControlReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableServoControlRsp.SerializeToString, + ), + 'EnableTorqueControl': grpc.unary_unary_rpc_method_handler( + servicer.EnableTorqueControl, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableTorqueControlReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableTorqueControlRsp.SerializeToString, + ), + 'SetInstallationAngle': grpc.unary_unary_rpc_method_handler( + servicer.SetInstallationAngle, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetInstallationAngleReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetInstallationAngleRsp.SerializeToString, + ), + 'GetInstallationAngle': grpc.unary_unary_rpc_method_handler( + servicer.GetInstallationAngle, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetInstallationAngleReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetInstallationAngleRsp.SerializeToString, + ), + 'EnableAdmittanceCtrl': grpc.unary_unary_rpc_method_handler( + servicer.EnableAdmittanceCtrl, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableAdmittanceCtrlReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableAdmittanceCtrlRsp.SerializeToString, + ), + 'SetFtCtrlFrame': grpc.unary_unary_rpc_method_handler( + servicer.SetFtCtrlFrame, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetFtCtrlFrameReq.FromString, + 
response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetFtCtrlFrameRsp.SerializeToString, + ), + 'DisableForceControl': grpc.unary_unary_rpc_method_handler( + servicer.DisableForceControl, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.DisableForceControlReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.DisableForceControlRsp.SerializeToString, + ), + 'SetCompliantType': grpc.unary_unary_rpc_method_handler( + servicer.SetCompliantType, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCompliantTypeReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCompliantTypeRsp.SerializeToString, + ), + 'SetTorqueSensorMode': grpc.unary_unary_rpc_method_handler( + servicer.SetTorqueSensorMode, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetTorqueSensorModeReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetTorqueSensorModeRsp.SerializeToString, + ), + 'SetAdmitCtrlConfig': grpc.unary_unary_rpc_method_handler( + servicer.SetAdmitCtrlConfig, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetAdmitCtrlConfigReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetAdmitCtrlConfigRsp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.A2wArmControlService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class A2wArmControlService(object): + """* + @brief 由 aimrt_hal 中的 jaka_module::JakaModule 模块提供的服务 + """ + + @staticmethod + def GetJointPosition(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetJointPosition', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetJointPositionReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetJointPositionRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def JointMove(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/JointMove', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.JointMoveReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.JointMoveRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def LinearMove(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/LinearMove', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.LinearMoveReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.LinearMoveRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetRobotState(request, + target, + options=(), + 
channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetRobotState', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStateReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStateRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetRobotStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetRobotStatus', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStatusReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetRobotStatusRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def IsInPos(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/IsInPos', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInPosReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInPosRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def IsOnLimit(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, 
target, '/aimdk.protocol.A2wArmControlService/IsOnLimit', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsOnLimitReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsOnLimitRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetTcpPosition(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetTcpPosition', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetTcpPositionReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetTcpPositionRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def KineForward(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/KineForward', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineForwardReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineForwardRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def KineInverse(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/KineInverse', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineInverseReq.SerializeToString, + 
aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.KineInverseRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ClearError(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/ClearError', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.ClearErrorReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.ClearErrorRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetLastError(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetLastError', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetLastErrorReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetLastErrorRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def IsInCollision(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/IsInCollision', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInCollisionReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.IsInCollisionRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + 
@staticmethod + def CollisionRecover(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/CollisionRecover', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.CollisionRecoverReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.CollisionRecoverRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetCollisionLevel(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetCollisionLevel', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetCollisionLevelReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetCollisionLevelRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetCollisionLevel(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/SetCollisionLevel', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCollisionLevelReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCollisionLevelRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def EnableServoControl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + 
insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/EnableServoControl', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableServoControlReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableServoControlRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def EnableTorqueControl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/EnableTorqueControl', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableTorqueControlReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableTorqueControlRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetInstallationAngle(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/SetInstallationAngle', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetInstallationAngleReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetInstallationAngleRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetInstallationAngle(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return 
grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/GetInstallationAngle', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetInstallationAngleReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.GetInstallationAngleRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def EnableAdmittanceCtrl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/EnableAdmittanceCtrl', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableAdmittanceCtrlReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.EnableAdmittanceCtrlRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetFtCtrlFrame(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/SetFtCtrlFrame', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetFtCtrlFrameReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetFtCtrlFrameRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DisableForceControl(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/DisableForceControl', + 
aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.DisableForceControlReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.DisableForceControlRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetCompliantType(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/SetCompliantType', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCompliantTypeReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetCompliantTypeRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetTorqueSensorMode(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/SetTorqueSensorMode', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetTorqueSensorModeReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetTorqueSensorModeRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetAdmitCtrlConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.A2wArmControlService/SetAdmitCtrlConfig', + aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetAdmitCtrlConfigReq.SerializeToString, + 
aimdk_dot_protocol_dot_hal_dot_jaka_dot_jaka__pb2.SetAdmitCtrlConfigRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel.proto b/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel.proto new file mode 100644 index 0000000..d485ca6 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel.proto @@ -0,0 +1,69 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/header.proto"; +import public "aimdk/protocol/common/joint.proto"; +import public "aimdk/protocol/common/se3_pose.proto"; + +message JointStatesChannel { + /** 消息头 */ + Header header = 1; + + /** + * 关节状态,包括角度和速度、扭矩 + */ + repeated JointState states = 2; +} +message IKStatus{ + repeated string joint_names=1; + repeated double joint_positions=2; + bool isSuccess=3; +} + +message JointCommandsChannel { + /** 消息头 */ + Header header = 1; + + /** + * 关节指令 + */ + repeated JointCommand commands = 2; +} + +message GetJointReq { + string serial_no = 1; +} +message GetJointRsp { + repeated JointState states = 1; +} +message GetEEPoseReq { + bool is_right = 1; +} +message GetEEPoseRsp { + string prim_path = 1; + SE3RpyPose ee_pose = 2; +} +message GetIKStatusReq { + bool is_right = 1; + repeated SE3RpyPose target_pose = 2; + bool ObsAvoid = 3; +} +message GetIKStatusRsp { + repeated IKStatus IKStatus = 2; +} + + +message SetJointReq { + repeated JointCommand commands = 1; + bool is_trajectory = 2; +} +message SetJointRsp { + string errmsg = 1; +} + +service JointControlService { + rpc GetJointPosition(GetJointReq) returns(GetJointRsp); + rpc SetJointPosition(SetJointReq) returns(SetJointRsp); + rpc GetIKStatus(GetIKStatusReq) returns(GetIKStatusRsp); + rpc GetEEPose(GetEEPoseReq) returns(GetEEPoseRsp); +} \ No newline at end of file diff --git 
a/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2.py new file mode 100644 index 0000000..818cb4d --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/hal/joint/joint_channel.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import header_pb2 as aimdk_dot_protocol_dot_common_dot_header__pb2 +from aimdk.protocol.common import joint_pb2 as aimdk_dot_protocol_dot_common_dot_joint__pb2 +from aimdk.protocol.common import se3_pose_pb2 as aimdk_dot_protocol_dot_common_dot_se3__pose__pb2 +try: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_vec3__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.vec3_pb2 +try: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.quaternion_pb2 +try: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_rpy__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = 
aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.rpy_pb2 + +from aimdk.protocol.common.header_pb2 import * +from aimdk.protocol.common.joint_pb2 import * +from aimdk.protocol.common.se3_pose_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,aimdk/protocol/hal/joint/joint_channel.proto\x12\x0e\x61imdk.protocol\x1a\"aimdk/protocol/common/header.proto\x1a!aimdk/protocol/common/joint.proto\x1a$aimdk/protocol/common/se3_pose.proto\"h\n\x12JointStatesChannel\x12&\n\x06header\x18\x01 \x01(\x0b\x32\x16.aimdk.protocol.Header\x12*\n\x06states\x18\x02 \x03(\x0b\x32\x1a.aimdk.protocol.JointState\"K\n\x08IKStatus\x12\x13\n\x0bjoint_names\x18\x01 \x03(\t\x12\x17\n\x0fjoint_positions\x18\x02 \x03(\x01\x12\x11\n\tisSuccess\x18\x03 \x01(\x08\"n\n\x14JointCommandsChannel\x12&\n\x06header\x18\x01 \x01(\x0b\x32\x16.aimdk.protocol.Header\x12.\n\x08\x63ommands\x18\x02 \x03(\x0b\x32\x1c.aimdk.protocol.JointCommand\" \n\x0bGetJointReq\x12\x11\n\tserial_no\x18\x01 \x01(\t\"9\n\x0bGetJointRsp\x12*\n\x06states\x18\x01 \x03(\x0b\x32\x1a.aimdk.protocol.JointState\" \n\x0cGetEEPoseReq\x12\x10\n\x08is_right\x18\x01 \x01(\x08\"N\n\x0cGetEEPoseRsp\x12\x11\n\tprim_path\x18\x01 \x01(\t\x12+\n\x07\x65\x65_pose\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\"e\n\x0eGetIKStatusReq\x12\x10\n\x08is_right\x18\x01 \x01(\x08\x12/\n\x0btarget_pose\x18\x02 \x03(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x10\n\x08ObsAvoid\x18\x03 \x01(\x08\"<\n\x0eGetIKStatusRsp\x12*\n\x08IKStatus\x18\x02 \x03(\x0b\x32\x18.aimdk.protocol.IKStatus\"T\n\x0bSetJointReq\x12.\n\x08\x63ommands\x18\x01 \x03(\x0b\x32\x1c.aimdk.protocol.JointCommand\x12\x15\n\ris_trajectory\x18\x02 \x01(\x08\"\x1d\n\x0bSetJointRsp\x12\x0e\n\x06\x65rrmsg\x18\x01 
\x01(\t2\xc9\x02\n\x13JointControlService\x12L\n\x10GetJointPosition\x12\x1b.aimdk.protocol.GetJointReq\x1a\x1b.aimdk.protocol.GetJointRsp\x12L\n\x10SetJointPosition\x12\x1b.aimdk.protocol.SetJointReq\x1a\x1b.aimdk.protocol.SetJointRsp\x12M\n\x0bGetIKStatus\x12\x1e.aimdk.protocol.GetIKStatusReq\x1a\x1e.aimdk.protocol.GetIKStatusRsp\x12G\n\tGetEEPose\x12\x1c.aimdk.protocol.GetEEPoseReq\x1a\x1c.aimdk.protocol.GetEEPoseRspP\x00P\x01P\x02\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.hal.joint.joint_channel_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_JOINTSTATESCHANNEL']._serialized_start=173 + _globals['_JOINTSTATESCHANNEL']._serialized_end=277 + _globals['_IKSTATUS']._serialized_start=279 + _globals['_IKSTATUS']._serialized_end=354 + _globals['_JOINTCOMMANDSCHANNEL']._serialized_start=356 + _globals['_JOINTCOMMANDSCHANNEL']._serialized_end=466 + _globals['_GETJOINTREQ']._serialized_start=468 + _globals['_GETJOINTREQ']._serialized_end=500 + _globals['_GETJOINTRSP']._serialized_start=502 + _globals['_GETJOINTRSP']._serialized_end=559 + _globals['_GETEEPOSEREQ']._serialized_start=561 + _globals['_GETEEPOSEREQ']._serialized_end=593 + _globals['_GETEEPOSERSP']._serialized_start=595 + _globals['_GETEEPOSERSP']._serialized_end=673 + _globals['_GETIKSTATUSREQ']._serialized_start=675 + _globals['_GETIKSTATUSREQ']._serialized_end=776 + _globals['_GETIKSTATUSRSP']._serialized_start=778 + _globals['_GETIKSTATUSRSP']._serialized_end=838 + _globals['_SETJOINTREQ']._serialized_start=840 + _globals['_SETJOINTREQ']._serialized_end=924 + _globals['_SETJOINTRSP']._serialized_start=926 + _globals['_SETJOINTRSP']._serialized_end=955 + _globals['_JOINTCONTROLSERVICE']._serialized_start=958 + _globals['_JOINTCONTROLSERVICE']._serialized_end=1287 +# @@protoc_insertion_point(module_scope) diff --git 
a/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2_grpc.py new file mode 100644 index 0000000..d66048d --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/joint_channel_pb2_grpc.py @@ -0,0 +1,165 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from aimdk.protocol.hal.joint import joint_channel_pb2 as aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2 + + +class JointControlServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetJointPosition = channel.unary_unary( + '/aimdk.protocol.JointControlService/GetJointPosition', + request_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetJointReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetJointRsp.FromString, + ) + self.SetJointPosition = channel.unary_unary( + '/aimdk.protocol.JointControlService/SetJointPosition', + request_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.SetJointReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.SetJointRsp.FromString, + ) + self.GetIKStatus = channel.unary_unary( + '/aimdk.protocol.JointControlService/GetIKStatus', + request_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetIKStatusReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetIKStatusRsp.FromString, + ) + self.GetEEPose = channel.unary_unary( + '/aimdk.protocol.JointControlService/GetEEPose', + request_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetEEPoseReq.SerializeToString, + 
response_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetEEPoseRsp.FromString, + ) + + +class JointControlServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetJointPosition(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetJointPosition(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetIKStatus(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetEEPose(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_JointControlServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetJointPosition': grpc.unary_unary_rpc_method_handler( + servicer.GetJointPosition, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetJointReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetJointRsp.SerializeToString, + ), + 'SetJointPosition': grpc.unary_unary_rpc_method_handler( + servicer.SetJointPosition, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.SetJointReq.FromString, + 
response_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.SetJointRsp.SerializeToString, + ), + 'GetIKStatus': grpc.unary_unary_rpc_method_handler( + servicer.GetIKStatus, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetIKStatusReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetIKStatusRsp.SerializeToString, + ), + 'GetEEPose': grpc.unary_unary_rpc_method_handler( + servicer.GetEEPose, + request_deserializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetEEPoseReq.FromString, + response_serializer=aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetEEPoseRsp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.JointControlService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class JointControlService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetJointPosition(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.JointControlService/GetJointPosition', + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetJointReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetJointRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetJointPosition(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, 
'/aimdk.protocol.JointControlService/SetJointPosition', + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.SetJointReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.SetJointRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetIKStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.JointControlService/GetIKStatus', + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetIKStatusReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetIKStatusRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetEEPose(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.JointControlService/GetEEPose', + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetEEPoseReq.SerializeToString, + aimdk_dot_protocol_dot_hal_dot_joint_dot_joint__channel__pb2.GetEEPoseRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/hal/joint/msg/Command.msg b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/Command.msg new file mode 100644 index 0000000..b11a20c --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/Command.msg @@ -0,0 +1,7 @@ +string name +uint32 sequence +float64 position +float64 velocity +float64 effort +float64 stiffness +float64 damping \ No newline at 
end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointCommand.msg b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointCommand.msg new file mode 100644 index 0000000..d7f2a25 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointCommand.msg @@ -0,0 +1,3 @@ +std_msgs/Header header + +Command[] joints \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointState.msg b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointState.msg new file mode 100644 index 0000000..ea8162c --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/JointState.msg @@ -0,0 +1,3 @@ +std_msgs/Header header + +State[] joints \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/joint/msg/State.msg b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/State.msg new file mode 100644 index 0000000..847ed3c --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/joint/msg/State.msg @@ -0,0 +1,5 @@ +string name +uint32 sequence +float64 position +float64 velocity +float64 effort \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommand.msg b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommand.msg new file mode 100644 index 0000000..ed08fcb --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommand.msg @@ -0,0 +1,6 @@ +float64 x +float64 y +float64 z +float64 rx +float64 ry +float64 rz diff --git a/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommands.msg b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommands.msg new file mode 100644 index 0000000..12788d3 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseCommands.msg @@ -0,0 +1,4 @@ +std_msgs/Header header + +string[] names +PoseCommand[] poses \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseState.msg 
b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseState.msg new file mode 100644 index 0000000..ed08fcb --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseState.msg @@ -0,0 +1,6 @@ +float64 x +float64 y +float64 z +float64 rx +float64 ry +float64 rz diff --git a/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseStates.msg b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseStates.msg new file mode 100644 index 0000000..79c94ad --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/PoseStates.msg @@ -0,0 +1,5 @@ +std_msgs/Header header + +string[] names +PoseState[] poses +TorqSensorState[] torqs diff --git a/data_gen_dependencies/aimdk/protocol/hal/pose/msg/TorqSensorState.msg b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/TorqSensorState.msg new file mode 100644 index 0000000..85845bb --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/pose/msg/TorqSensorState.msg @@ -0,0 +1,3 @@ +float64[] act_torque +float64[] torque +float64[] real_torque diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/FootSensorState.msg b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/FootSensorState.msg new file mode 100644 index 0000000..b605a13 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/FootSensorState.msg @@ -0,0 +1,4 @@ +std_msgs/Header header + +string[] names +SensorState[] states \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPoint.msg b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPoint.msg new file mode 100644 index 0000000..7fba176 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPoint.msg @@ -0,0 +1,9 @@ +# Livox costom pointcloud format. 
+ +uint32 offset_time # offset time relative to the base time +float32 x # X axis, unit:m +float32 y # Y axis, unit:m +float32 z # Z axis, unit:m +uint8 reflectivity # reflectivity, 0~255 +uint8 tag # livox tag +uint8 line # laser number in lidar \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPointCloud.msg b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPointCloud.msg new file mode 100644 index 0000000..e62eeae --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/LivoxPointCloud.msg @@ -0,0 +1,8 @@ +# Livox publish pointcloud msg format. + +std_msgs/Header header # ROS standard message header +uint64 timebase # The time of first point +uint32 point_num # Total number of pointclouds +uint8 lidar_id # Lidar device id number +uint8[3] rsvd # Reserved use +LivoxPoint[] points # Pointcloud data diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/SensorState.msg b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/SensorState.msg new file mode 100644 index 0000000..fb6e2fe --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/msg/SensorState.msg @@ -0,0 +1 @@ +int16[] presure diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera.proto b/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera.proto new file mode 100644 index 0000000..100b617 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera.proto @@ -0,0 +1,98 @@ +syntax = "proto3"; +package aimdk.protocol; + + +/** + * @brief 图像类型,包括深度和彩色 + */ +enum ImageType { + ImageType_UNDEFINED = 0; + ImageType_DEPTH = 1; + ImageType_COLOR = 2; +} + +/** + * @brief 图像模型参数 + */ +enum DistortionType { + DistortionType_UNDEFINED = 0; + DistortionType_MODIFIED_BROWN_CONRADY = 1; + DistortionType_INVERSE_BROWN_CONRADY = 2; + DistortionType_FTHETA = 3; + DistortionType_BROWN_CONRADY = 4; + DistortionType_KANNALA_BRANDT4 = 5; + DistortionType_COUNT = 6; +} + +/** + * @brief 
压缩图像数据 + */ +message CompressedImage { + string format = 2; + bytes data = 3; +} + +/** + * @brief 相机内参 + */ +message CameraInfo { + int32 width = 1; + int32 height = 2; + float ppx = 3; + float ppy = 4; + float fx = 5; + float fy = 6; + DistortionType model = 7; + repeated float coeffs = 8; +} + +/** + * @brief rpc GetCameraInfo() 的请求数据 + */ +message GetCameraInfoRequest { + string serial_no = 1; + ImageType stream_type = 2; +} + +/** + * @brief rpc GetCameraInfo() 的响应数据 + */ +message GetCameraInfoResponse { + repeated CameraInfo info = 1; +} + +/** + * @brief rpc GetCameraData() 的请求数据 + */ +message GetCameraDataRequest { + string serial_no = 1; +} + +/** + * @brief rpc GetCameraData() 的响应数据 + */ +message GetCameraDataResponse { + string serial_no = 1; + CameraInfo color_info = 2; + CompressedImage color_image = 3; + CameraInfo depth_info = 4; + CompressedImage depth_image = 5; +} + +/** + * @brief 由 aimrt_hal 中的 realsense_camera_module::RealsenseCameraModule 模块提供的服务 + */ +service CameraService { + /** + * @brief 获取 realsense_camera_module::RealsenseCameraModule 收到的相机内参信息。 + * @param GetCameraInfoRequest 相机序列号 + * @return GetCameraInfoResponse 相机内参 + */ + rpc GetCameraInfo(GetCameraInfoRequest) returns (GetCameraInfoResponse); + /** + * @brief 获取 realsense_camera_module::RealsenseCameraModule 收到的相机数据,包括相机内参、彩色和深度图像信息。 + * @param GetCameraDataRequest 相机序列号 + * @return GetCameraDataResponse 详细数据 + */ + rpc GetCameraData(GetCameraDataRequest) returns (GetCameraDataResponse); +} diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2.py b/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2.py new file mode 100644 index 0000000..d99fea8 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: protocol/hal/sensors/rs2_camera.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%protocol/hal/sensors/rs2_camera.proto\x12\x0e\x61imdk.protocol\"/\n\x0f\x43ompressedImage\x12\x0e\n\x06\x66ormat\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\"\x9c\x01\n\nCameraInfo\x12\r\n\x05width\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\x0b\n\x03ppx\x18\x03 \x01(\x02\x12\x0b\n\x03ppy\x18\x04 \x01(\x02\x12\n\n\x02\x66x\x18\x05 \x01(\x02\x12\n\n\x02\x66y\x18\x06 \x01(\x02\x12-\n\x05model\x18\x07 \x01(\x0e\x32\x1e.aimdk.protocol.DistortionType\x12\x0e\n\x06\x63oeffs\x18\x08 \x03(\x02\"Y\n\x14GetCameraInfoRequest\x12\x11\n\tserial_no\x18\x01 \x01(\t\x12.\n\x0bstream_type\x18\x02 \x01(\x0e\x32\x19.aimdk.protocol.ImageType\"A\n\x15GetCameraInfoResponse\x12(\n\x04info\x18\x01 \x03(\x0b\x32\x1a.aimdk.protocol.CameraInfo\")\n\x14GetCameraDataRequest\x12\x11\n\tserial_no\x18\x01 \x01(\t\"\xf6\x01\n\x15GetCameraDataResponse\x12\x11\n\tserial_no\x18\x01 \x01(\t\x12.\n\ncolor_info\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.CameraInfo\x12\x34\n\x0b\x63olor_image\x18\x03 \x01(\x0b\x32\x1f.aimdk.protocol.CompressedImage\x12.\n\ndepth_info\x18\x04 \x01(\x0b\x32\x1a.aimdk.protocol.CameraInfo\x12\x34\n\x0b\x64\x65pth_image\x18\x05 
\x01(\x0b\x32\x1f.aimdk.protocol.CompressedImage*N\n\tImageType\x12\x17\n\x13ImageType_UNDEFINED\x10\x00\x12\x13\n\x0fImageType_DEPTH\x10\x01\x12\x13\n\x0fImageType_COLOR\x10\x02*\xfe\x01\n\x0e\x44istortionType\x12\x1c\n\x18\x44istortionType_UNDEFINED\x10\x00\x12)\n%DistortionType_MODIFIED_BROWN_CONRADY\x10\x01\x12(\n$DistortionType_INVERSE_BROWN_CONRADY\x10\x02\x12\x19\n\x15\x44istortionType_FTHETA\x10\x03\x12 \n\x1c\x44istortionType_BROWN_CONRADY\x10\x04\x12\"\n\x1e\x44istortionType_KANNALA_BRANDT4\x10\x05\x12\x18\n\x14\x44istortionType_COUNT\x10\x06\x32\xcb\x01\n\rCameraService\x12\\\n\rGetCameraInfo\x12$.aimdk.protocol.GetCameraInfoRequest\x1a%.aimdk.protocol.GetCameraInfoResponse\x12\\\n\rGetCameraData\x12$.aimdk.protocol.GetCameraDataRequest\x1a%.aimdk.protocol.GetCameraDataResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'protocol.hal.sensors.rs2_camera_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_IMAGETYPE']._serialized_start=715 + _globals['_IMAGETYPE']._serialized_end=793 + _globals['_DISTORTIONTYPE']._serialized_start=796 + _globals['_DISTORTIONTYPE']._serialized_end=1050 + _globals['_COMPRESSEDIMAGE']._serialized_start=57 + _globals['_COMPRESSEDIMAGE']._serialized_end=104 + _globals['_CAMERAINFO']._serialized_start=107 + _globals['_CAMERAINFO']._serialized_end=263 + _globals['_GETCAMERAINFOREQUEST']._serialized_start=265 + _globals['_GETCAMERAINFOREQUEST']._serialized_end=354 + _globals['_GETCAMERAINFORESPONSE']._serialized_start=356 + _globals['_GETCAMERAINFORESPONSE']._serialized_end=421 + _globals['_GETCAMERADATAREQUEST']._serialized_start=423 + _globals['_GETCAMERADATAREQUEST']._serialized_end=464 + _globals['_GETCAMERADATARESPONSE']._serialized_start=467 + _globals['_GETCAMERADATARESPONSE']._serialized_end=713 + _globals['_CAMERASERVICE']._serialized_start=1053 + 
_globals['_CAMERASERVICE']._serialized_end=1256 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2_grpc.py new file mode 100644 index 0000000..7802fb7 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/hal/sensors/rs2_camera_pb2_grpc.py @@ -0,0 +1,113 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import rs2_camera_pb2 as protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2 + + +class CameraServiceStub(object): + """* + @brief 由 aimrt_hal 中的 realsense_camera_module::RealsenseCameraModule 模块提供的服务 + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetCameraInfo = channel.unary_unary( + '/aimdk.protocol.CameraService/GetCameraInfo', + request_serializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraInfoRequest.SerializeToString, + response_deserializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraInfoResponse.FromString, + ) + self.GetCameraData = channel.unary_unary( + '/aimdk.protocol.CameraService/GetCameraData', + request_serializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraDataRequest.SerializeToString, + response_deserializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraDataResponse.FromString, + ) + + +class CameraServiceServicer(object): + """* + @brief 由 aimrt_hal 中的 realsense_camera_module::RealsenseCameraModule 模块提供的服务 + """ + + def GetCameraInfo(self, request, context): + """* + @brief 获取 realsense_camera_module::RealsenseCameraModule 收到的相机内参信息。 + @param GetCameraInfoRequest 相机序列号 + @return GetCameraInfoResponse 相机内参 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + 
def GetCameraData(self, request, context): + """* + @brief 获取 realsense_camera_module::RealsenseCameraModule 收到的相机数据,包括相机内参、彩色和深度图像信息。 + @param GetCameraDataRequest 相机序列号 + @return GetCameraDataResponse 详细数据 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_CameraServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetCameraInfo': grpc.unary_unary_rpc_method_handler( + servicer.GetCameraInfo, + request_deserializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraInfoRequest.FromString, + response_serializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraInfoResponse.SerializeToString, + ), + 'GetCameraData': grpc.unary_unary_rpc_method_handler( + servicer.GetCameraData, + request_deserializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraDataRequest.FromString, + response_serializer=protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraDataResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.CameraService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class CameraService(object): + """* + @brief 由 aimrt_hal 中的 realsense_camera_module::RealsenseCameraModule 模块提供的服务 + """ + + @staticmethod + def GetCameraInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.CameraService/GetCameraInfo', + protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraInfoRequest.SerializeToString, + protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraInfoResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetCameraData(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.CameraService/GetCameraData', + protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraDataRequest.SerializeToString, + protocol_dot_hal_dot_sensors_dot_rs2__camera__pb2.GetCameraDataResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service.proto b/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service.proto new file mode 100644 index 0000000..d397fbe --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package aimdk.protocol; + +message SemanticMask { + string name = 2; + bytes data = 3; +} +message SemanticLabel { + int32 label_id = 2; + string label_name = 3; +} + +message GetSemanticRequest { + string serial_no = 1; +} +message GetSemanticResponse { + string serial_no = 1; + SemanticMask semantic_mask = 2; + repeated SemanticLabel 
label_dict = 3; +} + +service SimCameraService { + + rpc GetSemanticData(GetSemanticRequest) returns(GetSemanticResponse); +} diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2.py b/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2.py new file mode 100644 index 0000000..5717432 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: protocol/sim/sim_camera_service.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%protocol/sim/sim_camera_service.proto\x12\x0e\x61imdk.protocol\"*\n\x0cSemanticMask\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\"5\n\rSemanticLabel\x12\x10\n\x08label_id\x18\x02 \x01(\x05\x12\x12\n\nlabel_name\x18\x03 \x01(\t\"\'\n\x12GetSemanticRequest\x12\x11\n\tserial_no\x18\x01 \x01(\t\"\x90\x01\n\x13GetSemanticResponse\x12\x11\n\tserial_no\x18\x01 \x01(\t\x12\x33\n\rsemantic_mask\x18\x02 \x01(\x0b\x32\x1c.aimdk.protocol.SemanticMask\x12\x31\n\nlabel_dict\x18\x03 \x03(\x0b\x32\x1d.aimdk.protocol.SemanticLabel2n\n\x10SimCameraService\x12Z\n\x0fGetSemanticData\x12\".aimdk.protocol.GetSemanticRequest\x1a#.aimdk.protocol.GetSemanticResponseb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'protocol.sim.sim_camera_service_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + 
_globals['_SEMANTICMASK']._serialized_start=57 + _globals['_SEMANTICMASK']._serialized_end=99 + _globals['_SEMANTICLABEL']._serialized_start=101 + _globals['_SEMANTICLABEL']._serialized_end=154 + _globals['_GETSEMANTICREQUEST']._serialized_start=156 + _globals['_GETSEMANTICREQUEST']._serialized_end=195 + _globals['_GETSEMANTICRESPONSE']._serialized_start=198 + _globals['_GETSEMANTICRESPONSE']._serialized_end=342 + _globals['_SIMCAMERASERVICE']._serialized_start=344 + _globals['_SIMCAMERASERVICE']._serialized_end=454 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2_grpc.py new file mode 100644 index 0000000..27f31bb --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_camera_service_pb2_grpc.py @@ -0,0 +1,66 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import sim_camera_service_pb2 as protocol_dot_sim_dot_sim__camera__service__pb2 + + +class SimCameraServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetSemanticData = channel.unary_unary( + '/aimdk.protocol.SimCameraService/GetSemanticData', + request_serializer=protocol_dot_sim_dot_sim__camera__service__pb2.GetSemanticRequest.SerializeToString, + response_deserializer=protocol_dot_sim_dot_sim__camera__service__pb2.GetSemanticResponse.FromString, + ) + + +class SimCameraServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetSemanticData(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SimCameraServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetSemanticData': grpc.unary_unary_rpc_method_handler( + servicer.GetSemanticData, + request_deserializer=protocol_dot_sim_dot_sim__camera__service__pb2.GetSemanticRequest.FromString, + response_serializer=protocol_dot_sim_dot_sim__camera__service__pb2.GetSemanticResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.SimCameraService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class SimCameraService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetSemanticData(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimCameraService/GetSemanticData', + protocol_dot_sim_dot_sim__camera__service__pb2.GetSemanticRequest.SerializeToString, + protocol_dot_sim_dot_sim__camera__service__pb2.GetSemanticResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service.proto b/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service.proto new file mode 100644 index 0000000..c80c69e --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/se3_pose.proto"; + +message SetGripperStateReq { + string gripper_command = 1; + bool is_right = 2; + double opened_width = 3; +} +message SetGripperStateRsp { + string msg = 1; +} +message GetGripperStateReq { + bool is_right = 1; +} +message GetGripperStateRsp { + string gripper_name = 1 + SE3RpyPose gripper_pose = 2; +} +service SimGripperService { + rpc SetGripperState(SetGripperStateReq) returns(SetGripperStateRsp); + rpc GetGripperState(GetGripperStateReq) returns(GetGripperStateRsp); +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2.py b/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2.py new file mode 100644 index 0000000..528f640 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/sim/sim_gripper_service.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import se3_pose_pb2 as aimdk_dot_protocol_dot_common_dot_se3__pose__pb2 +try: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_vec3__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.vec3_pb2 +try: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.quaternion_pb2 +try: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_rpy__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.rpy_pb2 + +from aimdk.protocol.common.se3_pose_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,aimdk/protocol/sim/sim_gripper_service.proto\x12\x0e\x61imdk.protocol\x1a$aimdk/protocol/common/se3_pose.proto\"U\n\x12SetGripperStateReq\x12\x17\n\x0fgripper_command\x18\x01 \x01(\t\x12\x10\n\x08is_right\x18\x02 \x01(\x08\x12\x14\n\x0copened_width\x18\x03 \x01(\x01\"!\n\x12SetGripperStateRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"&\n\x12GetGripperStateReq\x12\x10\n\x08is_right\x18\x01 
\x01(\x08\"F\n\x12GetGripperStateRsp\x12\x30\n\x0cgripper_pose\x18\x01 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose2\xc9\x01\n\x11SimGripperService\x12Y\n\x0fSetGripperState\x12\".aimdk.protocol.SetGripperStateReq\x1a\".aimdk.protocol.SetGripperStateRsp\x12Y\n\x0fGetGripperState\x12\".aimdk.protocol.GetGripperStateReq\x1a\".aimdk.protocol.GetGripperStateRspP\x00\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.sim.sim_gripper_service_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_SETGRIPPERSTATEREQ']._serialized_start=102 + _globals['_SETGRIPPERSTATEREQ']._serialized_end=187 + _globals['_SETGRIPPERSTATERSP']._serialized_start=189 + _globals['_SETGRIPPERSTATERSP']._serialized_end=222 + _globals['_GETGRIPPERSTATEREQ']._serialized_start=224 + _globals['_GETGRIPPERSTATEREQ']._serialized_end=262 + _globals['_GETGRIPPERSTATERSP']._serialized_start=264 + _globals['_GETGRIPPERSTATERSP']._serialized_end=334 + _globals['_SIMGRIPPERSERVICE']._serialized_start=337 + _globals['_SIMGRIPPERSERVICE']._serialized_end=538 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2_grpc.py new file mode 100644 index 0000000..86f157c --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_gripper_service_pb2_grpc.py @@ -0,0 +1,99 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from aimdk.protocol.sim import sim_gripper_service_pb2 as aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2 + + +class SimGripperServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. 
+ + Args: + channel: A grpc.Channel. + """ + self.SetGripperState = channel.unary_unary( + '/aimdk.protocol.SimGripperService/SetGripperState', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.SetGripperStateReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.SetGripperStateRsp.FromString, + ) + self.GetGripperState = channel.unary_unary( + '/aimdk.protocol.SimGripperService/GetGripperState', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.GetGripperStateReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.GetGripperStateRsp.FromString, + ) + + +class SimGripperServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def SetGripperState(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetGripperState(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SimGripperServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'SetGripperState': grpc.unary_unary_rpc_method_handler( + servicer.SetGripperState, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.SetGripperStateReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.SetGripperStateRsp.SerializeToString, + ), + 'GetGripperState': grpc.unary_unary_rpc_method_handler( + servicer.GetGripperState, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.GetGripperStateReq.FromString, + 
response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.GetGripperStateRsp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.SimGripperService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class SimGripperService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def SetGripperState(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimGripperService/SetGripperState', + aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.SetGripperStateReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.SetGripperStateRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetGripperState(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimGripperService/GetGripperState', + aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.GetGripperStateReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__gripper__service__pb2.GetGripperStateRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_joint_service.proto b/data_gen_dependencies/aimdk/protocol/sim/sim_joint_service.proto new file mode 100644 index 0000000..e69de29 diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_object_service.proto 
b/data_gen_dependencies/aimdk/protocol/sim/sim_object_service.proto new file mode 100644 index 0000000..071073c --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_object_service.proto @@ -0,0 +1,57 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/se3_pose.proto"; +import public "aimdk/protocol/common/vec3.proto"; + +message AddObjectReq { + string usd_path = 1; //物体usd资源位置 + string prim_path = 2; //物体摆放在场景中的primpath + string label_name = 3; //物体标签名称 + SE3RpyPose object_pose = 4; //物体放置的位姿 + Vec3 object_scale = 5; //物体缩放比例 + Color object_color = 6; + string object_material = 7; + double object_mass = 8; + bool add_particle = 9; + Vec3 particle_position = 10; + Vec3 particle_scale = 11; + Color particle_color = 12; +} +message Color { + float r = 1; + float g = 2; + float b = 3; +} +message AddObjectRsp { + string prim_path = 1; + string label_name = 2; +} +message GetObjectPoseReq { + string prim_path = 1; +} +message GetObjectPoseRsp { + string prim_path = 1; + SE3RpyPose object_pose = 2; +} +message GetObjectJointReq { + string prim_path = 1; +} +message GetObjectJointRsp { + repeated string joint_names = 1; + repeated double joint_positions = 2; + repeated double joint_velocities = 3; +} +message SetTargetPointReq { + Vec3 point_position = 1; +} +message SetTargetPointRsp { + string msg = 1; +} + +service SimObjectService { + rpc AddObject(AddObjectReq) returns(AddObjectRsp); + rpc GetObjectPose(GetObjectPoseReq) returns(GetObjectPoseRsp); + rpc GetObjectJoint(GetObjectJointReq) returns(GetObjectJointRsp); + rpc SetTargetPoint(SetTargetPointReq) returns(SetTargetPointRsp); +} diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2.py b/data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2.py new file mode 100644 index 0000000..b423233 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Generated by the 
protocol buffer compiler. DO NOT EDIT! +# source: aimdk/protocol/sim/sim_object_service.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import se3_pose_pb2 as aimdk_dot_protocol_dot_common_dot_se3__pose__pb2 +try: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_vec3__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.vec3_pb2 +try: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.quaternion_pb2 +try: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_rpy__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.rpy_pb2 +from aimdk.protocol.common import vec3_pb2 as aimdk_dot_protocol_dot_common_dot_vec3__pb2 + +from aimdk.protocol.common.se3_pose_pb2 import * +from aimdk.protocol.common.vec3_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+aimdk/protocol/sim/sim_object_service.proto\x12\x0e\x61imdk.protocol\x1a$aimdk/protocol/common/se3_pose.proto\x1a aimdk/protocol/common/vec3.proto\"\xa3\x03\n\x0c\x41\x64\x64ObjectReq\x12\x10\n\x08usd_path\x18\x01 
\x01(\t\x12\x11\n\tprim_path\x18\x02 \x01(\t\x12\x12\n\nlabel_name\x18\x03 \x01(\t\x12/\n\x0bobject_pose\x18\x04 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12*\n\x0cobject_scale\x18\x05 \x01(\x0b\x32\x14.aimdk.protocol.Vec3\x12+\n\x0cobject_color\x18\x06 \x01(\x0b\x32\x15.aimdk.protocol.Color\x12\x17\n\x0fobject_material\x18\x07 \x01(\t\x12\x13\n\x0bobject_mass\x18\x08 \x01(\x01\x12\x14\n\x0c\x61\x64\x64_particle\x18\t \x01(\x08\x12/\n\x11particle_position\x18\n \x01(\x0b\x32\x14.aimdk.protocol.Vec3\x12,\n\x0eparticle_scale\x18\x0b \x01(\x0b\x32\x14.aimdk.protocol.Vec3\x12-\n\x0eparticle_color\x18\x0c \x01(\x0b\x32\x15.aimdk.protocol.Color\"(\n\x05\x43olor\x12\t\n\x01r\x18\x01 \x01(\x02\x12\t\n\x01g\x18\x02 \x01(\x02\x12\t\n\x01\x62\x18\x03 \x01(\x02\"5\n\x0c\x41\x64\x64ObjectRsp\x12\x11\n\tprim_path\x18\x01 \x01(\t\x12\x12\n\nlabel_name\x18\x02 \x01(\t\"%\n\x10GetObjectPoseReq\x12\x11\n\tprim_path\x18\x01 \x01(\t\"V\n\x10GetObjectPoseRsp\x12\x11\n\tprim_path\x18\x01 \x01(\t\x12/\n\x0bobject_pose\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\"&\n\x11GetObjectJointReq\x12\x11\n\tprim_path\x18\x01 \x01(\t\"[\n\x11GetObjectJointRsp\x12\x13\n\x0bjoint_names\x18\x01 \x03(\t\x12\x17\n\x0fjoint_positions\x18\x02 \x03(\x01\x12\x18\n\x10joint_velocities\x18\x03 \x03(\x01\"A\n\x11SetTargetPointReq\x12,\n\x0epoint_position\x18\x01 \x01(\x0b\x32\x14.aimdk.protocol.Vec3\" \n\x11SetTargetPointRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t2\xe0\x02\n\x10SimObjectService\x12G\n\tAddObject\x12\x1c.aimdk.protocol.AddObjectReq\x1a\x1c.aimdk.protocol.AddObjectRsp\x12S\n\rGetObjectPose\x12 .aimdk.protocol.GetObjectPoseReq\x1a .aimdk.protocol.GetObjectPoseRsp\x12V\n\x0eGetObjectJoint\x12!.aimdk.protocol.GetObjectJointReq\x1a!.aimdk.protocol.GetObjectJointRsp\x12V\n\x0eSetTargetPoint\x12!.aimdk.protocol.SetTargetPointReq\x1a!.aimdk.protocol.SetTargetPointRspP\x00P\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.sim.sim_object_service_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_ADDOBJECTREQ']._serialized_start=136 + _globals['_ADDOBJECTREQ']._serialized_end=555 + _globals['_COLOR']._serialized_start=557 + _globals['_COLOR']._serialized_end=597 + _globals['_ADDOBJECTRSP']._serialized_start=599 + _globals['_ADDOBJECTRSP']._serialized_end=652 + _globals['_GETOBJECTPOSEREQ']._serialized_start=654 + _globals['_GETOBJECTPOSEREQ']._serialized_end=691 + _globals['_GETOBJECTPOSERSP']._serialized_start=693 + _globals['_GETOBJECTPOSERSP']._serialized_end=779 + _globals['_GETOBJECTJOINTREQ']._serialized_start=781 + _globals['_GETOBJECTJOINTREQ']._serialized_end=819 + _globals['_GETOBJECTJOINTRSP']._serialized_start=821 + _globals['_GETOBJECTJOINTRSP']._serialized_end=912 + _globals['_SETTARGETPOINTREQ']._serialized_start=914 + _globals['_SETTARGETPOINTREQ']._serialized_end=979 + _globals['_SETTARGETPOINTRSP']._serialized_start=981 + _globals['_SETTARGETPOINTRSP']._serialized_end=1013 + _globals['_SIMOBJECTSERVICE']._serialized_start=1016 + _globals['_SIMOBJECTSERVICE']._serialized_end=1368 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2_grpc.py new file mode 100644 index 0000000..f3966cf --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_object_service_pb2_grpc.py @@ -0,0 +1,165 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from aimdk.protocol.sim import sim_object_service_pb2 as aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2 + + +class SimObjectServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.AddObject = channel.unary_unary( + '/aimdk.protocol.SimObjectService/AddObject', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.AddObjectReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.AddObjectRsp.FromString, + ) + self.GetObjectPose = channel.unary_unary( + '/aimdk.protocol.SimObjectService/GetObjectPose', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectPoseReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectPoseRsp.FromString, + ) + self.GetObjectJoint = channel.unary_unary( + '/aimdk.protocol.SimObjectService/GetObjectJoint', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectJointReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectJointRsp.FromString, + ) + self.SetTargetPoint = channel.unary_unary( + '/aimdk.protocol.SimObjectService/SetTargetPoint', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.SetTargetPointReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.SetTargetPointRsp.FromString, + ) + + +class SimObjectServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def AddObject(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not 
implemented!') + raise NotImplementedError('Method not implemented!') + + def GetObjectPose(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetObjectJoint(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetTargetPoint(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SimObjectServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AddObject': grpc.unary_unary_rpc_method_handler( + servicer.AddObject, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.AddObjectReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.AddObjectRsp.SerializeToString, + ), + 'GetObjectPose': grpc.unary_unary_rpc_method_handler( + servicer.GetObjectPose, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectPoseReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectPoseRsp.SerializeToString, + ), + 'GetObjectJoint': grpc.unary_unary_rpc_method_handler( + servicer.GetObjectJoint, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectJointReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectJointRsp.SerializeToString, + ), + 'SetTargetPoint': grpc.unary_unary_rpc_method_handler( + servicer.SetTargetPoint, + 
request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.SetTargetPointReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.SetTargetPointRsp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.SimObjectService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class SimObjectService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def AddObject(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObjectService/AddObject', + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.AddObjectReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.AddObjectRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetObjectPose(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObjectService/GetObjectPose', + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectPoseReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectPoseRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetObjectJoint(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + 
return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObjectService/GetObjectJoint', + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectJointReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.GetObjectJointRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetTargetPoint(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObjectService/SetTargetPoint', + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.SetTargetPointReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__object__service__pb2.SetTargetPointRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service.proto b/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service.proto new file mode 100644 index 0000000..1493617 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service.proto @@ -0,0 +1,287 @@ +syntax = "proto3"; +package aimdk.protocol; + +import public "aimdk/protocol/common/se3_pose.proto"; +import public "aimdk/protocol/common/joint.proto"; + +/** + * @brief 标签信息 + */ +message SemanticData { + string name = 2; + bytes data = 3; +} +message SemanticDict { + int32 label_id = 2; + string label_name = 3; +} +/** + * @brief 压缩图像数据 + */ +message CamImage { + string format = 2; + bytes data = 3; +} + +/** + * @brief 相机内参 + */ +message CamInfo { + int32 width = 1; + int32 height = 2; + float ppx = 3; + float ppy = 4; + float fx = 5; + float fy = 6; +} + +message CameraRsp { + CamInfo camera_info = 1; + CamImage rgb_camera = 2; + CamImage depth_camera = 
3; + SemanticData semantic_mask = 4; + repeated SemanticDict label_dict = 5; +} + +message JointRsp { + repeated JointState left_arm = 1; + repeated JointState right_arm = 2; + repeated JointState body_arm = 3; + +} + +message ObjectRsp { + SE3RpyPose object_pose = 7; +} +message GripperRsp { + SE3RpyPose left_gripper = 1; + SE3RpyPose right_gripper = 2; + +} + +message CameraRequest{ + bool render_depth = 1; + bool render_semantic = 2; + repeated string camera_prim_list = 3; +} + + +message GripperRequest{ + bool left = 1; + bool right = 2; +} + + +/** + * @brief request + */ +message GetObservationReq { + bool isCam = 1; + CameraRequest CameraReq = 2; + bool isJoint = 3; + bool isPose = 4; + repeated string objectPrims = 5; + bool isGripper = 6; + GripperRequest gripperReq = 7; + bool startRecording = 8; + bool stopRecording = 9; + int32 fps = 10; + string task_name = 11; +} +/** + * @brief response + */ +message GetObservationRsp { + repeated CameraRsp camera = 1; + repeated ObjectRsp pose = 2; + JointRsp joint = 3; + GripperRsp gripper = 4; + string recordingState = 5; +} + + +message ResetReq{ + bool reset = 1; +} +message ResetRsp{ + string msg = 1; +} +message AttachReq{ + repeated string obj_prims = 1; + bool is_right = 2; +} +message AttachRsp{ + string msg = 1; +} +message DetachReq{ + bool detach = 1; +} +message DetachRsp{ + string msg = 1; +} +message MultiMoveReq{ + string robot_name = 1; + bool plan = 2; + repeated SE3RpyPose poses = 3; + CmdPlan cmd_plan = 4; +} +message MultiMoveRsp{ + repeated CmdPlan cmd_plans = 1; + string msg = 2; +} +message CmdPlan{ + repeated string joint_names = 1; + repeated SinglePlan joint_plans = 2; +} +message SinglePlan{ + repeated float joint_pos = 1; +} +message TaskStatusReq{ + bool isSuccess = 1; + repeated int32 failStep =2; + +} +message TaskStatusRsp{ + string msg = 1; +} + +message ExitReq{ + bool exit = 1; +} +message ExitRsp{ + string msg = 2; +} + +message GetObjectsOfTypeReq{ + string obj_type = 1; +} 
+message GetObjectsOfTypeRsp{ + repeated string prim_paths = 1; +} + +message InitRobotReq{ + string robot_cfg_file = 1; + string robot_usd_path = 2; + string scene_usd_path = 3; + SE3RpyPose robot_pose = 4; + string stand_type = 5; + float stand_size_x = 6; + float stand_size_y = 7; +} +message InitRobotRsp{ + string msg = 1; +} +message AddCameraReq{ + string camera_prim = 1; + SE3RpyPose camera_pose = 2; + float focus_length = 3; + float horizontal_aperture = 4; + float vertical_aperture = 5; + int32 width = 6; + int32 height = 7; + bool is_local = 8; +} +message AddCameraRsp{ + string msg = 1; +} +message DrawLineReq{ + repeated Vec3 point_list_1 = 1; + repeated Vec3 point_list_2 = 2; + repeated Vec3 colors = 3; + repeated float sizes = 4; + string name = 5; +} +message DrawLineRsp{ + string msg = 1; +} +message ClearLineReq{ + string name = 1; +} +message ClearLineRsp{ + string msg = 1; +} +message ObjectPose { + string prim_path = 1; + SE3RpyPose pose = 2; +} +message ObjectJoint { + string prim_path = 1; + repeated JointCommand joint_cmd = 2; +} +message SetObjectPoseReq { + repeated ObjectPose object_pose = 1; + repeated JointCommand joint_cmd = 2; + repeated ObjectJoint object_joint = 3; + +} +message SetObjectPoseRsp{ + string msg = 1; +} + +message SetCameraPoseReq{ + repeated ObjectPose camera_pose = 1; +} + +message SetCameraPoseRsp{ + string msg = 1; +} + +message SetTrajectoryListReq { + repeated SE3RpyPose trajectory_point = 1; + bool is_block = 2; +} +message SetTrajectoryListRsp { + string msg = 1; +} +message SetFrameStateReq { + string frame_state = 1; +} +message SetFrameStateRsp{ + string msg = 1; +} +message MaterialInfo { + string object_prim = 1; + string material_name = 2; + string material_path = 3; + string label_name = 4; +} +message SetMaterailReq { + repeated MaterialInfo materials = 1; +} +message SetMaterialRsp { + string msg = 1; +} +message LightCfg { + string light_type = 1; + string light_prim = 2; + float light_temperature = 3; 
+ float light_intensity = 4; + Rpy light_rotation = 5; + string light_texture = 6; +} +message SetLightReq { + repeated LightCfg lights = 1; +} +message SetLightRsp { + string msg = 1; +} + +service SimObservationService { + rpc GetObservation(GetObservationReq) returns(GetObservationRsp); + rpc Reset(ResetReq) returns(ResetRsp); + rpc AttachObj(AttachReq) returns(AttachRsp); + rpc DetachObj(DetachReq) returns(DetachRsp); + rpc MultiMove(MultiMoveReq) returns(MultiMoveRsp); + rpc GetObjectsOfType(GetObjectsOfTypeReq) returns(GetObjectsOfTypeRsp); + rpc TaskStatus(TaskStatusReq) returns(TaskStatusRsp); + rpc Exit(ExitReq) returns(ExitRsp); + rpc InitRobot(InitRobotReq) returns(InitRobotRsp); + rpc AddCamera(AddCameraReq) returns(AddCameraRsp); + rpc DrawLine(DrawLineReq) returns(DrawLineRsp); + rpc ClearLine(ClearLineReq) returns(ClearLineRsp); + rpc SetObjectPose(SetObjectPoseReq) returns(SetObjectPoseRsp); + rpc SetTrajectoryList(SetTrajectoryListReq) returns(SetTrajectoryListRsp); + rpc SetFrameState(SetFrameStateReq) returns(SetFrameStateRsp); + rpc SetMaterial(SetMaterailReq) returns(SetMaterialRsp); + rpc SetLight (SetLightReq) returns (SetLightRsp); +} \ No newline at end of file diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2.py b/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2.py new file mode 100644 index 0000000..4e7f2b4 --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: aimdk/protocol/sim/sim_observation_service.proto +# Protobuf Python Version: 4.25.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from aimdk.protocol.common import se3_pose_pb2 as aimdk_dot_protocol_dot_common_dot_se3__pose__pb2 +try: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_vec3__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_vec3__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.vec3_pb2 +try: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_quaternion__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_quaternion__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.quaternion_pb2 +try: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk_dot_protocol_dot_common_dot_rpy__pb2 +except AttributeError: + aimdk_dot_protocol_dot_common_dot_rpy__pb2 = aimdk_dot_protocol_dot_common_dot_se3__pose__pb2.aimdk.protocol.common.rpy_pb2 +from aimdk.protocol.common import joint_pb2 as aimdk_dot_protocol_dot_common_dot_joint__pb2 + +from aimdk.protocol.common.se3_pose_pb2 import * +from aimdk.protocol.common.joint_pb2 import * + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n0aimdk/protocol/sim/sim_observation_service.proto\x12\x0e\x61imdk.protocol\x1a$aimdk/protocol/common/se3_pose.proto\x1a!aimdk/protocol/common/joint.proto\"*\n\x0cSemanticData\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 
\x01(\x0c\"4\n\x0cSemanticDict\x12\x10\n\x08label_id\x18\x02 \x01(\x05\x12\x12\n\nlabel_name\x18\x03 \x01(\t\"(\n\x08\x43\x61mImage\x12\x0e\n\x06\x66ormat\x18\x02 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\"Z\n\x07\x43\x61mInfo\x12\r\n\x05width\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\x0b\n\x03ppx\x18\x03 \x01(\x02\x12\x0b\n\x03ppy\x18\x04 \x01(\x02\x12\n\n\x02\x66x\x18\x05 \x01(\x02\x12\n\n\x02\x66y\x18\x06 \x01(\x02\"\xfe\x01\n\tCameraRsp\x12,\n\x0b\x63\x61mera_info\x18\x01 \x01(\x0b\x32\x17.aimdk.protocol.CamInfo\x12,\n\nrgb_camera\x18\x02 \x01(\x0b\x32\x18.aimdk.protocol.CamImage\x12.\n\x0c\x64\x65pth_camera\x18\x03 \x01(\x0b\x32\x18.aimdk.protocol.CamImage\x12\x33\n\rsemantic_mask\x18\x04 \x01(\x0b\x32\x1c.aimdk.protocol.SemanticData\x12\x30\n\nlabel_dict\x18\x05 \x03(\x0b\x32\x1c.aimdk.protocol.SemanticDict\"\x95\x01\n\x08JointRsp\x12,\n\x08left_arm\x18\x01 \x03(\x0b\x32\x1a.aimdk.protocol.JointState\x12-\n\tright_arm\x18\x02 \x03(\x0b\x32\x1a.aimdk.protocol.JointState\x12,\n\x08\x62ody_arm\x18\x03 \x03(\x0b\x32\x1a.aimdk.protocol.JointState\"<\n\tObjectRsp\x12/\n\x0bobject_pose\x18\x07 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\"q\n\nGripperRsp\x12\x30\n\x0cleft_gripper\x18\x01 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x31\n\rright_gripper\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\"X\n\rCameraRequest\x12\x14\n\x0crender_depth\x18\x01 \x01(\x08\x12\x17\n\x0frender_semantic\x18\x02 \x01(\x08\x12\x18\n\x10\x63\x61mera_prim_list\x18\x03 \x03(\t\"-\n\x0eGripperRequest\x12\x0c\n\x04left\x18\x01 \x01(\x08\x12\r\n\x05right\x18\x02 \x01(\x08\"\xa0\x02\n\x11GetObservationReq\x12\r\n\x05isCam\x18\x01 \x01(\x08\x12\x30\n\tCameraReq\x18\x02 \x01(\x0b\x32\x1d.aimdk.protocol.CameraRequest\x12\x0f\n\x07isJoint\x18\x03 \x01(\x08\x12\x0e\n\x06isPose\x18\x04 \x01(\x08\x12\x13\n\x0bobjectPrims\x18\x05 \x03(\t\x12\x11\n\tisGripper\x18\x06 \x01(\x08\x12\x32\n\ngripperReq\x18\x07 
\x01(\x0b\x32\x1e.aimdk.protocol.GripperRequest\x12\x16\n\x0estartRecording\x18\x08 \x01(\x08\x12\x15\n\rstopRecording\x18\t \x01(\x08\x12\x0b\n\x03\x66ps\x18\n \x01(\x05\x12\x11\n\ttask_name\x18\x0b \x01(\t\"\xd5\x01\n\x11GetObservationRsp\x12)\n\x06\x63\x61mera\x18\x01 \x03(\x0b\x32\x19.aimdk.protocol.CameraRsp\x12\'\n\x04pose\x18\x02 \x03(\x0b\x32\x19.aimdk.protocol.ObjectRsp\x12\'\n\x05joint\x18\x03 \x01(\x0b\x32\x18.aimdk.protocol.JointRsp\x12+\n\x07gripper\x18\x04 \x01(\x0b\x32\x1a.aimdk.protocol.GripperRsp\x12\x16\n\x0erecordingState\x18\x05 \x01(\t\"\x19\n\x08ResetReq\x12\r\n\x05reset\x18\x01 \x01(\x08\"\x17\n\x08ResetRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"0\n\tAttachReq\x12\x11\n\tobj_prims\x18\x01 \x03(\t\x12\x10\n\x08is_right\x18\x02 \x01(\x08\"\x18\n\tAttachRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\x1b\n\tDetachReq\x12\x0e\n\x06\x64\x65tach\x18\x01 \x01(\x08\"\x18\n\tDetachRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\x86\x01\n\x0cMultiMoveReq\x12\x12\n\nrobot_name\x18\x01 \x01(\t\x12\x0c\n\x04plan\x18\x02 \x01(\x08\x12)\n\x05poses\x18\x03 \x03(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12)\n\x08\x63md_plan\x18\x04 \x01(\x0b\x32\x17.aimdk.protocol.CmdPlan\"G\n\x0cMultiMoveRsp\x12*\n\tcmd_plans\x18\x01 \x03(\x0b\x32\x17.aimdk.protocol.CmdPlan\x12\x0b\n\x03msg\x18\x02 \x01(\t\"O\n\x07\x43mdPlan\x12\x13\n\x0bjoint_names\x18\x01 \x03(\t\x12/\n\x0bjoint_plans\x18\x02 \x03(\x0b\x32\x1a.aimdk.protocol.SinglePlan\"\x1f\n\nSinglePlan\x12\x11\n\tjoint_pos\x18\x01 \x03(\x02\"4\n\rTaskStatusReq\x12\x11\n\tisSuccess\x18\x01 \x01(\x08\x12\x10\n\x08\x66\x61ilStep\x18\x02 \x03(\x05\"\x1c\n\rTaskStatusRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\x17\n\x07\x45xitReq\x12\x0c\n\x04\x65xit\x18\x01 \x01(\x08\"\x16\n\x07\x45xitRsp\x12\x0b\n\x03msg\x18\x02 \x01(\t\"\'\n\x13GetObjectsOfTypeReq\x12\x10\n\x08obj_type\x18\x01 \x01(\t\")\n\x13GetObjectsOfTypeRsp\x12\x12\n\nprim_paths\x18\x01 \x03(\t\"\xc6\x01\n\x0cInitRobotReq\x12\x16\n\x0erobot_cfg_file\x18\x01 
\x01(\t\x12\x16\n\x0erobot_usd_path\x18\x02 \x01(\t\x12\x16\n\x0escene_usd_path\x18\x03 \x01(\t\x12.\n\nrobot_pose\x18\x04 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x12\n\nstand_type\x18\x05 \x01(\t\x12\x14\n\x0cstand_size_x\x18\x06 \x01(\x02\x12\x14\n\x0cstand_size_y\x18\x07 \x01(\x02\"\x1b\n\x0cInitRobotRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\xd3\x01\n\x0c\x41\x64\x64\x43\x61meraReq\x12\x13\n\x0b\x63\x61mera_prim\x18\x01 \x01(\t\x12/\n\x0b\x63\x61mera_pose\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x14\n\x0c\x66ocus_length\x18\x03 \x01(\x02\x12\x1b\n\x13horizontal_aperture\x18\x04 \x01(\x02\x12\x19\n\x11vertical_aperture\x18\x05 \x01(\x02\x12\r\n\x05width\x18\x06 \x01(\x05\x12\x0e\n\x06height\x18\x07 \x01(\x05\x12\x10\n\x08is_local\x18\x08 \x01(\x08\"\x1b\n\x0c\x41\x64\x64\x43\x61meraRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\xa8\x01\n\x0b\x44rawLineReq\x12*\n\x0cpoint_list_1\x18\x01 \x03(\x0b\x32\x14.aimdk.protocol.Vec3\x12*\n\x0cpoint_list_2\x18\x02 \x03(\x0b\x32\x14.aimdk.protocol.Vec3\x12$\n\x06\x63olors\x18\x03 \x03(\x0b\x32\x14.aimdk.protocol.Vec3\x12\r\n\x05sizes\x18\x04 \x03(\x02\x12\x0c\n\x04name\x18\x05 \x01(\t\"\x1a\n\x0b\x44rawLineRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\x1c\n\x0c\x43learLineReq\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1b\n\x0c\x43learLineRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"I\n\nObjectPose\x12\x11\n\tprim_path\x18\x01 \x01(\t\x12(\n\x04pose\x18\x02 \x01(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\"Q\n\x0bObjectJoint\x12\x11\n\tprim_path\x18\x01 \x01(\t\x12/\n\tjoint_cmd\x18\x02 \x03(\x0b\x32\x1c.aimdk.protocol.JointCommand\"\xa7\x01\n\x10SetObjectPoseReq\x12/\n\x0bobject_pose\x18\x01 \x03(\x0b\x32\x1a.aimdk.protocol.ObjectPose\x12/\n\tjoint_cmd\x18\x02 \x03(\x0b\x32\x1c.aimdk.protocol.JointCommand\x12\x31\n\x0cobject_joint\x18\x03 \x03(\x0b\x32\x1b.aimdk.protocol.ObjectJoint\"\x1f\n\x10SetObjectPoseRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"^\n\x14SetTrajectoryListReq\x12\x34\n\x10trajectory_point\x18\x01 
\x03(\x0b\x32\x1a.aimdk.protocol.SE3RpyPose\x12\x10\n\x08is_block\x18\x02 \x01(\x08\"#\n\x14SetTrajectoryListRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\'\n\x10SetFrameStateReq\x12\x13\n\x0b\x66rame_state\x18\x01 \x01(\t\"\x1f\n\x10SetFrameStateRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"e\n\x0cMaterialInfo\x12\x13\n\x0bobject_prim\x18\x01 \x01(\t\x12\x15\n\rmaterial_name\x18\x02 \x01(\t\x12\x15\n\rmaterial_path\x18\x03 \x01(\t\x12\x12\n\nlabel_name\x18\x04 \x01(\t\"A\n\x0eSetMaterailReq\x12/\n\tmaterials\x18\x01 \x03(\x0b\x32\x1c.aimdk.protocol.MaterialInfo\"\x1d\n\x0eSetMaterialRsp\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\xaa\x01\n\x08LightCfg\x12\x12\n\nlight_type\x18\x01 \x01(\t\x12\x12\n\nlight_prim\x18\x02 \x01(\t\x12\x19\n\x11light_temperature\x18\x03 \x01(\x02\x12\x17\n\x0flight_intensity\x18\x04 \x01(\x02\x12+\n\x0elight_rotation\x18\x05 \x01(\x0b\x32\x13.aimdk.protocol.Rpy\x12\x15\n\rlight_texture\x18\x06 \x01(\t\"7\n\x0bSetLightReq\x12(\n\x06lights\x18\x01 \x03(\x0b\x32\x18.aimdk.protocol.LightCfg\"\x1a\n\x0bSetLightRsp\x12\x0b\n\x03msg\x18\x01 
\x01(\t2\xa0\n\n\x15SimObservationService\x12V\n\x0eGetObservation\x12!.aimdk.protocol.GetObservationReq\x1a!.aimdk.protocol.GetObservationRsp\x12;\n\x05Reset\x12\x18.aimdk.protocol.ResetReq\x1a\x18.aimdk.protocol.ResetRsp\x12\x41\n\tAttachObj\x12\x19.aimdk.protocol.AttachReq\x1a\x19.aimdk.protocol.AttachRsp\x12\x41\n\tDetachObj\x12\x19.aimdk.protocol.DetachReq\x1a\x19.aimdk.protocol.DetachRsp\x12G\n\tMultiMove\x12\x1c.aimdk.protocol.MultiMoveReq\x1a\x1c.aimdk.protocol.MultiMoveRsp\x12\\\n\x10GetObjectsOfType\x12#.aimdk.protocol.GetObjectsOfTypeReq\x1a#.aimdk.protocol.GetObjectsOfTypeRsp\x12J\n\nTaskStatus\x12\x1d.aimdk.protocol.TaskStatusReq\x1a\x1d.aimdk.protocol.TaskStatusRsp\x12\x38\n\x04\x45xit\x12\x17.aimdk.protocol.ExitReq\x1a\x17.aimdk.protocol.ExitRsp\x12G\n\tInitRobot\x12\x1c.aimdk.protocol.InitRobotReq\x1a\x1c.aimdk.protocol.InitRobotRsp\x12G\n\tAddCamera\x12\x1c.aimdk.protocol.AddCameraReq\x1a\x1c.aimdk.protocol.AddCameraRsp\x12\x44\n\x08\x44rawLine\x12\x1b.aimdk.protocol.DrawLineReq\x1a\x1b.aimdk.protocol.DrawLineRsp\x12G\n\tClearLine\x12\x1c.aimdk.protocol.ClearLineReq\x1a\x1c.aimdk.protocol.ClearLineRsp\x12S\n\rSetObjectPose\x12 .aimdk.protocol.SetObjectPoseReq\x1a .aimdk.protocol.SetObjectPoseRsp\x12_\n\x11SetTrajectoryList\x12$.aimdk.protocol.SetTrajectoryListReq\x1a$.aimdk.protocol.SetTrajectoryListRsp\x12S\n\rSetFrameState\x12 .aimdk.protocol.SetFrameStateReq\x1a .aimdk.protocol.SetFrameStateRsp\x12M\n\x0bSetMaterial\x12\x1e.aimdk.protocol.SetMaterailReq\x1a\x1e.aimdk.protocol.SetMaterialRsp\x12\x44\n\x08SetLight\x12\x1b.aimdk.protocol.SetLightReq\x1a\x1b.aimdk.protocol.SetLightRspP\x00P\x01\x62\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'aimdk.protocol.sim.sim_observation_service_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _globals['_SEMANTICDATA']._serialized_start=141 + 
_globals['_SEMANTICDATA']._serialized_end=183 + _globals['_SEMANTICDICT']._serialized_start=185 + _globals['_SEMANTICDICT']._serialized_end=237 + _globals['_CAMIMAGE']._serialized_start=239 + _globals['_CAMIMAGE']._serialized_end=279 + _globals['_CAMINFO']._serialized_start=281 + _globals['_CAMINFO']._serialized_end=371 + _globals['_CAMERARSP']._serialized_start=374 + _globals['_CAMERARSP']._serialized_end=628 + _globals['_JOINTRSP']._serialized_start=631 + _globals['_JOINTRSP']._serialized_end=780 + _globals['_OBJECTRSP']._serialized_start=782 + _globals['_OBJECTRSP']._serialized_end=842 + _globals['_GRIPPERRSP']._serialized_start=844 + _globals['_GRIPPERRSP']._serialized_end=957 + _globals['_CAMERAREQUEST']._serialized_start=959 + _globals['_CAMERAREQUEST']._serialized_end=1047 + _globals['_GRIPPERREQUEST']._serialized_start=1049 + _globals['_GRIPPERREQUEST']._serialized_end=1094 + _globals['_GETOBSERVATIONREQ']._serialized_start=1097 + _globals['_GETOBSERVATIONREQ']._serialized_end=1385 + _globals['_GETOBSERVATIONRSP']._serialized_start=1388 + _globals['_GETOBSERVATIONRSP']._serialized_end=1601 + _globals['_RESETREQ']._serialized_start=1603 + _globals['_RESETREQ']._serialized_end=1628 + _globals['_RESETRSP']._serialized_start=1630 + _globals['_RESETRSP']._serialized_end=1653 + _globals['_ATTACHREQ']._serialized_start=1655 + _globals['_ATTACHREQ']._serialized_end=1703 + _globals['_ATTACHRSP']._serialized_start=1705 + _globals['_ATTACHRSP']._serialized_end=1729 + _globals['_DETACHREQ']._serialized_start=1731 + _globals['_DETACHREQ']._serialized_end=1758 + _globals['_DETACHRSP']._serialized_start=1760 + _globals['_DETACHRSP']._serialized_end=1784 + _globals['_MULTIMOVEREQ']._serialized_start=1787 + _globals['_MULTIMOVEREQ']._serialized_end=1921 + _globals['_MULTIMOVERSP']._serialized_start=1923 + _globals['_MULTIMOVERSP']._serialized_end=1994 + _globals['_CMDPLAN']._serialized_start=1996 + _globals['_CMDPLAN']._serialized_end=2075 + 
_globals['_SINGLEPLAN']._serialized_start=2077 + _globals['_SINGLEPLAN']._serialized_end=2108 + _globals['_TASKSTATUSREQ']._serialized_start=2110 + _globals['_TASKSTATUSREQ']._serialized_end=2162 + _globals['_TASKSTATUSRSP']._serialized_start=2164 + _globals['_TASKSTATUSRSP']._serialized_end=2192 + _globals['_EXITREQ']._serialized_start=2194 + _globals['_EXITREQ']._serialized_end=2217 + _globals['_EXITRSP']._serialized_start=2219 + _globals['_EXITRSP']._serialized_end=2241 + _globals['_GETOBJECTSOFTYPEREQ']._serialized_start=2243 + _globals['_GETOBJECTSOFTYPEREQ']._serialized_end=2282 + _globals['_GETOBJECTSOFTYPERSP']._serialized_start=2284 + _globals['_GETOBJECTSOFTYPERSP']._serialized_end=2325 + _globals['_INITROBOTREQ']._serialized_start=2328 + _globals['_INITROBOTREQ']._serialized_end=2526 + _globals['_INITROBOTRSP']._serialized_start=2528 + _globals['_INITROBOTRSP']._serialized_end=2555 + _globals['_ADDCAMERAREQ']._serialized_start=2558 + _globals['_ADDCAMERAREQ']._serialized_end=2769 + _globals['_ADDCAMERARSP']._serialized_start=2771 + _globals['_ADDCAMERARSP']._serialized_end=2798 + _globals['_DRAWLINEREQ']._serialized_start=2801 + _globals['_DRAWLINEREQ']._serialized_end=2969 + _globals['_DRAWLINERSP']._serialized_start=2971 + _globals['_DRAWLINERSP']._serialized_end=2997 + _globals['_CLEARLINEREQ']._serialized_start=2999 + _globals['_CLEARLINEREQ']._serialized_end=3027 + _globals['_CLEARLINERSP']._serialized_start=3029 + _globals['_CLEARLINERSP']._serialized_end=3056 + _globals['_OBJECTPOSE']._serialized_start=3058 + _globals['_OBJECTPOSE']._serialized_end=3131 + _globals['_OBJECTJOINT']._serialized_start=3133 + _globals['_OBJECTJOINT']._serialized_end=3214 + _globals['_SETOBJECTPOSEREQ']._serialized_start=3217 + _globals['_SETOBJECTPOSEREQ']._serialized_end=3384 + _globals['_SETOBJECTPOSERSP']._serialized_start=3386 + _globals['_SETOBJECTPOSERSP']._serialized_end=3417 + _globals['_SETTRAJECTORYLISTREQ']._serialized_start=3419 + 
_globals['_SETTRAJECTORYLISTREQ']._serialized_end=3513 + _globals['_SETTRAJECTORYLISTRSP']._serialized_start=3515 + _globals['_SETTRAJECTORYLISTRSP']._serialized_end=3550 + _globals['_SETFRAMESTATEREQ']._serialized_start=3552 + _globals['_SETFRAMESTATEREQ']._serialized_end=3591 + _globals['_SETFRAMESTATERSP']._serialized_start=3593 + _globals['_SETFRAMESTATERSP']._serialized_end=3624 + _globals['_MATERIALINFO']._serialized_start=3626 + _globals['_MATERIALINFO']._serialized_end=3727 + _globals['_SETMATERAILREQ']._serialized_start=3729 + _globals['_SETMATERAILREQ']._serialized_end=3794 + _globals['_SETMATERIALRSP']._serialized_start=3796 + _globals['_SETMATERIALRSP']._serialized_end=3825 + _globals['_LIGHTCFG']._serialized_start=3828 + _globals['_LIGHTCFG']._serialized_end=3998 + _globals['_SETLIGHTREQ']._serialized_start=4000 + _globals['_SETLIGHTREQ']._serialized_end=4055 + _globals['_SETLIGHTRSP']._serialized_start=4057 + _globals['_SETLIGHTRSP']._serialized_end=4083 + _globals['_SIMOBSERVATIONSERVICE']._serialized_start=4086 + _globals['_SIMOBSERVATIONSERVICE']._serialized_end=5398 +# @@protoc_insertion_point(module_scope) diff --git a/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2_grpc.py b/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2_grpc.py new file mode 100644 index 0000000..7e1239e --- /dev/null +++ b/data_gen_dependencies/aimdk/protocol/sim/sim_observation_service_pb2_grpc.py @@ -0,0 +1,594 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from aimdk.protocol.sim import sim_observation_service_pb2 as aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2 + + +class SimObservationServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetObservation = channel.unary_unary( + '/aimdk.protocol.SimObservationService/GetObservation', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObservationReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObservationRsp.FromString, + ) + self.Reset = channel.unary_unary( + '/aimdk.protocol.SimObservationService/Reset', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ResetReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ResetRsp.FromString, + ) + self.AttachObj = channel.unary_unary( + '/aimdk.protocol.SimObservationService/AttachObj', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AttachReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AttachRsp.FromString, + ) + self.DetachObj = channel.unary_unary( + '/aimdk.protocol.SimObservationService/DetachObj', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DetachReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DetachRsp.FromString, + ) + self.MultiMove = channel.unary_unary( + '/aimdk.protocol.SimObservationService/MultiMove', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.MultiMoveReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.MultiMoveRsp.FromString, + ) + self.GetObjectsOfType = channel.unary_unary( + '/aimdk.protocol.SimObservationService/GetObjectsOfType', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObjectsOfTypeReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObjectsOfTypeRsp.FromString, + ) + self.TaskStatus = 
channel.unary_unary( + '/aimdk.protocol.SimObservationService/TaskStatus', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.TaskStatusReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.TaskStatusRsp.FromString, + ) + self.Exit = channel.unary_unary( + '/aimdk.protocol.SimObservationService/Exit', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ExitReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ExitRsp.FromString, + ) + self.InitRobot = channel.unary_unary( + '/aimdk.protocol.SimObservationService/InitRobot', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.InitRobotReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.InitRobotRsp.FromString, + ) + self.AddCamera = channel.unary_unary( + '/aimdk.protocol.SimObservationService/AddCamera', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AddCameraReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AddCameraRsp.FromString, + ) + self.DrawLine = channel.unary_unary( + '/aimdk.protocol.SimObservationService/DrawLine', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DrawLineReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DrawLineRsp.FromString, + ) + self.ClearLine = channel.unary_unary( + '/aimdk.protocol.SimObservationService/ClearLine', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ClearLineReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ClearLineRsp.FromString, + ) + self.SetObjectPose = channel.unary_unary( + '/aimdk.protocol.SimObservationService/SetObjectPose', 
+ request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetObjectPoseReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetObjectPoseRsp.FromString, + ) + self.SetTrajectoryList = channel.unary_unary( + '/aimdk.protocol.SimObservationService/SetTrajectoryList', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetTrajectoryListReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetTrajectoryListRsp.FromString, + ) + self.SetFrameState = channel.unary_unary( + '/aimdk.protocol.SimObservationService/SetFrameState', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetFrameStateReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetFrameStateRsp.FromString, + ) + self.SetMaterial = channel.unary_unary( + '/aimdk.protocol.SimObservationService/SetMaterial', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetMaterailReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetMaterialRsp.FromString, + ) + self.SetLight = channel.unary_unary( + '/aimdk.protocol.SimObservationService/SetLight', + request_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetLightReq.SerializeToString, + response_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetLightRsp.FromString, + ) + + +class SimObservationServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetObservation(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Reset(self, request, 
context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AttachObj(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DetachObj(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def MultiMove(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetObjectsOfType(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TaskStatus(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Exit(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def InitRobot(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not 
implemented!') + raise NotImplementedError('Method not implemented!') + + def AddCamera(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DrawLine(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ClearLine(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetObjectPose(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetTrajectoryList(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetFrameState(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetMaterial(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetLight(self, request, context): + """Missing associated 
documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SimObservationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetObservation': grpc.unary_unary_rpc_method_handler( + servicer.GetObservation, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObservationReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObservationRsp.SerializeToString, + ), + 'Reset': grpc.unary_unary_rpc_method_handler( + servicer.Reset, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ResetReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ResetRsp.SerializeToString, + ), + 'AttachObj': grpc.unary_unary_rpc_method_handler( + servicer.AttachObj, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AttachReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AttachRsp.SerializeToString, + ), + 'DetachObj': grpc.unary_unary_rpc_method_handler( + servicer.DetachObj, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DetachReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DetachRsp.SerializeToString, + ), + 'MultiMove': grpc.unary_unary_rpc_method_handler( + servicer.MultiMove, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.MultiMoveReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.MultiMoveRsp.SerializeToString, + ), + 'GetObjectsOfType': grpc.unary_unary_rpc_method_handler( + servicer.GetObjectsOfType, + 
request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObjectsOfTypeReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObjectsOfTypeRsp.SerializeToString, + ), + 'TaskStatus': grpc.unary_unary_rpc_method_handler( + servicer.TaskStatus, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.TaskStatusReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.TaskStatusRsp.SerializeToString, + ), + 'Exit': grpc.unary_unary_rpc_method_handler( + servicer.Exit, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ExitReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ExitRsp.SerializeToString, + ), + 'InitRobot': grpc.unary_unary_rpc_method_handler( + servicer.InitRobot, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.InitRobotReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.InitRobotRsp.SerializeToString, + ), + 'AddCamera': grpc.unary_unary_rpc_method_handler( + servicer.AddCamera, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AddCameraReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AddCameraRsp.SerializeToString, + ), + 'DrawLine': grpc.unary_unary_rpc_method_handler( + servicer.DrawLine, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DrawLineReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DrawLineRsp.SerializeToString, + ), + 'ClearLine': grpc.unary_unary_rpc_method_handler( + servicer.ClearLine, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ClearLineReq.FromString, + 
response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ClearLineRsp.SerializeToString, + ), + 'SetObjectPose': grpc.unary_unary_rpc_method_handler( + servicer.SetObjectPose, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetObjectPoseReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetObjectPoseRsp.SerializeToString, + ), + 'SetTrajectoryList': grpc.unary_unary_rpc_method_handler( + servicer.SetTrajectoryList, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetTrajectoryListReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetTrajectoryListRsp.SerializeToString, + ), + 'SetFrameState': grpc.unary_unary_rpc_method_handler( + servicer.SetFrameState, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetFrameStateReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetFrameStateRsp.SerializeToString, + ), + 'SetMaterial': grpc.unary_unary_rpc_method_handler( + servicer.SetMaterial, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetMaterailReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetMaterialRsp.SerializeToString, + ), + 'SetLight': grpc.unary_unary_rpc_method_handler( + servicer.SetLight, + request_deserializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetLightReq.FromString, + response_serializer=aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetLightRsp.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'aimdk.protocol.SimObservationService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class SimObservationService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetObservation(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/GetObservation', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObservationReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObservationRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Reset(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/Reset', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ResetReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ResetRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def AttachObj(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/AttachObj', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AttachReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AttachRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DetachObj(request, + 
target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/DetachObj', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DetachReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DetachRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def MultiMove(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/MultiMove', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.MultiMoveReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.MultiMoveRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetObjectsOfType(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/GetObjectsOfType', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObjectsOfTypeReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.GetObjectsOfTypeRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def TaskStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + 
wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/TaskStatus', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.TaskStatusReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.TaskStatusRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Exit(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/Exit', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ExitReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ExitRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def InitRobot(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/InitRobot', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.InitRobotReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.InitRobotRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def AddCamera(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/AddCamera', + 
aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AddCameraReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.AddCameraRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DrawLine(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/DrawLine', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DrawLineReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.DrawLineRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ClearLine(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/ClearLine', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ClearLineReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.ClearLineRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetObjectPose(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/SetObjectPose', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetObjectPoseReq.SerializeToString, + 
aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetObjectPoseRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetTrajectoryList(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/SetTrajectoryList', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetTrajectoryListReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetTrajectoryListRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetFrameState(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/SetFrameState', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetFrameStateReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetFrameStateRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetMaterial(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/SetMaterial', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetMaterailReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetMaterialRsp.FromString, + 
options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SetLight(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/aimdk.protocol.SimObservationService/SetLight', + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetLightReq.SerializeToString, + aimdk_dot_protocol_dot_sim_dot_sim__observation__service__pb2.SetLightRsp.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/data_gen_dependencies/base_agent.py b/data_gen_dependencies/base_agent.py new file mode 100644 index 0000000..f47632b --- /dev/null +++ b/data_gen_dependencies/base_agent.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +import json +import time +import numpy as np +from scipy.spatial.transform import Rotation as R, Slerp + +from data_gen_dependencies.robot import IsaacSimRpcRobot + + +class BaseAgent(object): + def __init__(self, robot: IsaacSimRpcRobot) -> None: + self.robot = robot + self.reset() + + def reset(self): + self.robot.reset() + # self.base_camera = "/World/Top_Camera" + # self.robot.base_camera = "/World/Top_Camera" + # self.data_keys = { + # "camera": { + # "camera_prim_list": [self.base_camera], + # "render_depth": True, + # "render_semantic": True, + # }, + # "pose": [self.base_camera], + # "joint_position": True, + # "gripper": True, + # } + # self.get_observation() + + def add_object(self, object_info: dict): + name = object_info["object_id"] + usd_path = object_info["model_path"] + position = np.array(object_info["position"]) + quaternion = np.array(object_info["quaternion"]) + if "scale" not in object_info: + object_info["scale"] = 1.0 + size = object_info.get('size', np.array([])) + if isinstance(size, list): + size = 
np.array(size) + if size.shape[0] == 0: + size = np.array(100) + + if np.max(size) > 10: + object_info["scale"] = 0.001 + + add_particle = False + particle_position = [0, 0, 0] + particle_scale = [1, 1, 1] + if "add_particle" in object_info: + add_particle = True + particle_position = object_info["add_particle"]["position"] + particle_scale = object_info["add_particle"]["scale"] + scale = np.array([object_info["scale"]] * 3) + material = "general" if "material" not in object_info else object_info["material"] + mass = 0.01 if "mass" not in object_info else object_info["mass"] + # if "materialOptions" in object_info: + # if "transparent" in object_info["materialOptions"]: + # material="Glass" + self.robot.client.add_object( + usd_path=usd_path, + prim_path="/World/Objects/%s" % name, + label_name=name, + target_position=position, + target_quaternion=quaternion, + target_scale=scale, + material=material, + color=[1, 1, 1], + mass=mass, + add_particle=add_particle, + particle_position=particle_position, + particle_scale=particle_scale + ) + + def generate_layout(self, task_file, init=True): + with open(task_file, "r") as f: + task_info = json.load(f) + + # init_position = task_info["init_position"] if "init_position" in task_info else [0] * 39 + # self.robot.client.set_joint_positions(init_position) + if init: + for object_info in task_info["objects"]: + if "box" not in object_info["name"]: + continue + self.add_object(object_info) + time.sleep(1) + + for object_info in task_info["objects"]: + if "obj" not in object_info["name"]: + continue + self.add_object(object_info) + time.sleep(1) + + self.task_info = task_info + self.robot.target_object = task_info["target_object"] + self.arm = task_info["arm"] + return task_info + + def execute(self, commands): + for command in commands: + type = command["action"] + content = command["content"] + if type == "move" or type == "rotate": + if self.robot.move(content) == False: + return False + else: + self.last_pose = 
content["matrix"] + elif type == "open_gripper": + id = "left" if "gripper" not in content else content["gripper"] + width = 0.1 if "width" not in content else content["width"] + self.robot.open_gripper(id=id, width=width) + + elif type == "close_gripper": + id = "left" if "gripper" not in content else content["gripper"] + force = 50 if "force" not in content else content["force"] + self.robot.close_gripper(id=id, force=force) + if "attach_obj" in content: + self.robot.client.AttachObj(prim_paths=[content["attach_obj"]]) + + else: + Exception("Not implemented.") + return True + + def start_recording(self, task_name, camera_prim_list): + print(camera_prim_list) + self.robot.client.start_recording( + task_name=task_name, + fps=30, + data_keys={ + "camera": { + "camera_prim_list": camera_prim_list, + "render_depth": False, + "render_semantic": False, + }, + "pose": ["/World/Raise_A2/gripper_center"], + "joint_position": True, + "gripper": True, + }, + ) + + def stop_recording(self, success): + self.robot.client.stop_recording() + self.robot.client.SendTaskStatus(success) + + def grasp( + self, + target_gripper_pose, + gripper_id=None, + use_pre_grasp=False, + use_pick_up=False, + grasp_width=0.1, + ): + gripper_id = "left" if gripper_id is None else gripper_id + pick_up_pose = np.copy(target_gripper_pose) + pick_up_pose[2, 3] = pick_up_pose[2, 3] + 0.1 # 抓到物体后,先往上方抬10cm + + commands = [] + commands.append( + { + "action": "open_gripper", + "content": { + "gripper": gripper_id, + "width": grasp_width, + }, + } + ) + if use_pre_grasp: + pre_pose = np.array([ + [1.0, 0, 0, 0], + [0, 1.0, 0, 0], + [0, 0, 1.0, -0.05], + [0, 0, 0, 1] + ]) + pre_grasp_pose = target_gripper_pose @ pre_pose + commands.append( + { + "action": "move", + "content": { + "matrix": pre_grasp_pose, + "type": "matrix", + "comment": "pre_grasp", + "trajectory_type": "ObsAvoid", + }, + } + ) + + grasp_trajectory_type = "Simple" if use_pre_grasp else "ObsAvoid" + commands.append( + { + "action": "move", 
+ "content": { + "matrix": target_gripper_pose, + "type": "matrix", + "comment": "grasp", + "trajectory_type": grasp_trajectory_type, + }, + } + ) + commands.append( + { + "action": "close_gripper", + "content": { + "gripper": gripper_id, + "force": 50, + }, + } + ) + + if use_pick_up: + commands.append( + { + "action": "move", + "content": { + "matrix": pick_up_pose, + "type": "matrix", + "comment": "pick_up", + "trajectory_type": "Simple", + }, + } + ) + + return commands + + def get_observation(self): + observation_raw = self.robot.get_observation(self.data_keys) + self.observation = observation_raw["camera"][self.robot.base_camera] + + def place( + self, + target_gripper_pose, + current_gripper_pose=None, + gripper2part=None, + gripper_id=None, + ): + gripper_id = "left" if gripper_id is None else gripper_id + + pre_place_pose = np.copy(target_gripper_pose) + pre_place_pose[2, 3] += 0.05 + + commands = [ + { + "action": "move", + "content": { + "matrix": pre_place_pose, + "type": "matrix", + "comment": "pre_place", + "trajectory_type": "ObsAvoid", + }, + }, + { + "action": "move", + "content": { + "matrix": target_gripper_pose, + "type": "matrix", + "comment": "place", + "trajectory_type": "Simple", + }, + }, + { + "action": "open_gripper", + "content": { + "gripper": gripper_id, + }, + }, + ] + return commands + + def pour( + self, + target_gripper_pose, + current_gripper_pose=None, + gripper2part=None, + gripper_id=None, + ): + def interpolate_rotation_matrices(rot_matrix1, rot_matrix2, num_interpolations): + rot1 = R.from_matrix(rot_matrix1) + rot2 = R.from_matrix(rot_matrix2) + quat1 = rot1.as_quat() + quat2 = rot2.as_quat() + times = [0, 1] + slerp = Slerp(times, R.from_quat([quat1, quat2])) + interp_times = np.linspace(0, 1, num_interpolations) + interp_rots = slerp(interp_times) + interp_matrices = interp_rots.as_matrix() + return interp_matrices + + target_part_pose = target_gripper_pose @ np.linalg.inv(gripper2part) + current_part_pose = 
current_gripper_pose @ np.linalg.inv(gripper2part) + commands = [] + rotations = interpolate_rotation_matrices(current_part_pose[:3, :3], target_part_pose[:3, :3], 5) + for i, rotation in enumerate(rotations): + target_part_pose_step = np.copy(target_part_pose) + target_part_pose_step[:3, :3] = rotation + target_gripper_pose_step = target_part_pose_step @ gripper2part + + commands.append( + { + "action": "move", + "content": { + "matrix": target_gripper_pose_step, + "type": "matrix", + "comment": "pour_sub_rotate_%d" % i, + "trajectory_type": "Simple", + }, + } + ) + return commands diff --git a/data_gen_dependencies/client.py b/data_gen_dependencies/client.py new file mode 100644 index 0000000..43f879a --- /dev/null +++ b/data_gen_dependencies/client.py @@ -0,0 +1,962 @@ +import grpc +import numpy as np +import sys +import os + +current_directory = os.path.dirname(os.path.abspath(__file__)) +if current_directory not in sys.path: + sys.path.append(current_directory) +# 相机协议 +from aimdk.protocol.hal.sensors import rs2_camera_pb2 +from aimdk.protocol.hal.sensors import rs2_camera_pb2_grpc +from aimdk.protocol.sim import sim_camera_service_pb2 +from aimdk.protocol.sim import sim_camera_service_pb2_grpc +# 臂协议 +from aimdk.protocol.hal.jaka import jaka_pb2 +from aimdk.protocol.hal.jaka import jaka_pb2_grpc +# 关节协议 +from aimdk.protocol.hal.joint import joint_channel_pb2 +from aimdk.protocol.hal.joint import joint_channel_pb2_grpc +# 夹爪协议 +from aimdk.protocol.sim import sim_gripper_service_pb2 +from aimdk.protocol.sim import sim_gripper_service_pb2_grpc +# 物体协议 +from aimdk.protocol.sim import sim_object_service_pb2 +from aimdk.protocol.sim import sim_object_service_pb2_grpc +# observation协议 +from aimdk.protocol.sim import sim_observation_service_pb2 +from aimdk.protocol.sim import sim_observation_service_pb2_grpc +import time +import json +import pinocchio +import os + + +# 目前代码中所有的旋转角度都以角度为单位 +class Rpc_Client: + def __init__(self, client_host, 
robot_urdf="A2D_120s.urdf"): + for i in range(600): + try: + self.channel = grpc.insecure_channel(client_host, options=[ + ('grpc.max_receive_message_length', 16094304) + ]) + grpc.channel_ready_future(self.channel).result(timeout=5) + self.robot_urdf = robot_urdf + except grpc.FutureTimeoutError as e: + print(f"Failed to connect to gRPC server[{i}] ({client_host}): {e}", flush=True) + time.sleep(3) + if i >= 599: + raise e + except grpc.RpcError as e: + print(f"Failed to connect to gRPC server[{i}] ({client_host}): {e}", flush=True) + time.sleep(3) + if i >= 599: + raise e + + def set_frame_state(self, action: str, substage_id: int, active_id: str, passive_id: str, if_attached: bool, + set_gripper_open=False, set_gripper_close=False, arm="default", target_pose=None): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.SetFrameStateReq() + if target_pose is not None: + target_pose = target_pose.tolist() + frame_state = { + "action": action, + "substage_id": substage_id, + "if_attached": if_attached, + "set_gripper_open": set_gripper_open, + "set_gripper_close": set_gripper_close, + "active_id": active_id, + "passive_id": passive_id, + "arm": arm, + "target_pose": target_pose + } + _frame_state: str = json.dumps(frame_state) + print(_frame_state) + req.frame_state = _frame_state + response = stub.SetFrameState(req) + return response + + def capture_frame(self, camera_prim_path): + stub = rs2_camera_pb2_grpc.CameraServiceStub(self.channel) + req = rs2_camera_pb2.GetCameraDataRequest() + req.serial_no = camera_prim_path + response = stub.GetCameraData(req) + # print(response.color_info) + # if response.color_image.data is not None: + # print(np.frombuffer(response.color_image.data, dtype=np.uint8)) + # print(np.frombuffer(response.depth_image.data, dtype=np.float32)) + return response + + def capture_semantic_frame(self, camera_prim_path): + stub = 
sim_camera_service_pb2_grpc.SimCameraServiceStub(self.channel) + req = sim_camera_service_pb2.GetSemanticRequest() + req.serial_no = "/World/Raise_A2/base/collisions/camera" + req.serial_no = camera_prim_path + response = stub.GetSemanticData(req) + # if response.semantic_mask.data is not None: + # print(np.frombuffer(response.semantic_mask.data, dtype=np.int16)) + + return response + + def moveto(self, target_position, target_quaternion, arm_name, is_backend=True, ee_interpolation=False, + distance_frame=0.0008): + stub = jaka_pb2_grpc.A2wArmControlServiceStub(self.channel) + req = jaka_pb2.LinearMoveReq() + req.robot_name = arm_name + req.pose.position.x, req.pose.position.y, req.pose.position.z = target_position + req.pose.rpy.rw, req.pose.rpy.rx, req.pose.rpy.ry, req.pose.rpy.rz = target_quaternion + req.is_block = is_backend + req.ee_interpolation = ee_interpolation + req.distance_frame = distance_frame + response = stub.LinearMove(req) + return response + + def set_joint_positions(self, target_joint_position, is_trajectory): + stub = joint_channel_pb2_grpc.JointControlServiceStub(self.channel) + req = joint_channel_pb2.SetJointReq() + req.is_trajectory = is_trajectory + for pos in target_joint_position: + joint_position = joint_channel_pb2.JointCommand() + joint_position.position = pos + req.commands.append(joint_position) + response = stub.SetJointPosition(req) + return response + + def get_joint_positions(self): + stub = joint_channel_pb2_grpc.JointControlServiceStub(self.channel) + req = joint_channel_pb2.GetJointReq() + req.serial_no = "robot" + response = stub.GetJointPosition(req) + return response + + # usd_path目前值定在data下的目录 如genie3D/01.usd + # prim_path需要指定在"/World/Objects/xxx"内,方便reset + def add_object(self, usd_path, prim_path, label_name, target_position, target_quaternion, target_scale, color, + material, mass=0.01, + add_particle=True, particle_position=[0, 0, 0], particle_scale=[0.1, 0.1, 0.1], + particle_color=[1, 1, 1]): + stub = 
sim_object_service_pb2_grpc.SimObjectServiceStub(self.channel) + req = sim_object_service_pb2.AddObjectReq() + req.usd_path = usd_path + req.prim_path = prim_path + req.label_name = label_name + req.object_color.r, req.object_color.g, req.object_color.b = color + req.object_pose.position.x, req.object_pose.position.y, req.object_pose.position.z = target_position + req.object_pose.rpy.rw, req.object_pose.rpy.rx, req.object_pose.rpy.ry, req.object_pose.rpy.rz = target_quaternion + req.object_scale.x, req.object_scale.y, req.object_scale.z = target_scale + req.object_material = material + req.object_mass = mass + req.add_particle = add_particle + req.particle_position.x, req.particle_position.y, req.particle_position.z = particle_position + req.particle_scale.x, req.particle_scale.y, req.particle_scale.z = particle_scale + req.particle_color.r, req.particle_color.g, req.particle_color.b = particle_color + + response = stub.AddObject(req) + return response + + def get_object_pose(self, prim_path): + stub = sim_object_service_pb2_grpc.SimObjectServiceStub(self.channel) + req = sim_object_service_pb2.GetObjectPoseReq() + req.prim_path = prim_path + response = stub.GetObjectPose(req) + return response + + def get_object_joint(self, prim_path): + stub = sim_object_service_pb2_grpc.SimObjectServiceStub(self.channel) + req = sim_object_service_pb2.GetObjectJointReq() + req.prim_path = prim_path + response = stub.GetObjectJoint(req) + return response + + def set_gripper_state(self, gripper_command, is_right, opened_width): + stub = sim_gripper_service_pb2_grpc.SimGripperServiceStub(self.channel) + req = sim_gripper_service_pb2.SetGripperStateReq() + req.gripper_command = gripper_command + req.is_right = is_right + req.opened_width = opened_width + response = stub.SetGripperState(req) + return response + + def get_gripper_state(self, is_right): + stub = sim_gripper_service_pb2_grpc.SimGripperServiceStub(self.channel) + req = sim_gripper_service_pb2.GetGripperStateReq() + 
req.is_right = is_right + response = stub.GetGripperState(req) + return response + + # client端获取get某一帧的全部observation + def get_observation(self, data_keys): + observation = {} + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.GetObservationReq() + if 'camera' in data_keys: + req.isCam = True + req.CameraReq.render_depth = data_keys['camera']['render_depth'] + req.CameraReq.render_semantic = data_keys['camera']['render_semantic'] + for prim in data_keys['camera']['camera_prim_list']: + req.CameraReq.camera_prim_list.append(prim) + if 'pose' in data_keys: + req.isPose = True + for pose in data_keys['pose']: + req.objectPrims.append(pose) + req.isJoint = data_keys['joint_position'] + req.isGripper = data_keys['gripper'] + response = stub.GetObservation(req) + + # protobuf转字典 + camera_datas = {} + for camera_data, camera_prim in zip(response.camera, data_keys['camera']['camera_prim_list']): + cam_data = { + "rgb_camera": np.frombuffer(camera_data.rgb_camera.data, dtype=np.uint8), + "depth_camera": np.frombuffer(camera_data.depth_camera.data, dtype=np.float32), + "camera_info": {"width": camera_data.camera_info.width, + "height": camera_data.camera_info.height, + "ppx": camera_data.camera_info.ppx, + "ppy": camera_data.camera_info.ppy, + "fx": camera_data.camera_info.fx, + "fy": camera_data.camera_info.fy} + } + camera_datas[camera_prim] = cam_data + joint_datas = {} + for joint in response.joint.left_arm: + joint_datas[joint.name] = joint.position + object_datas = {} + if 'pose' in data_keys: + for object, obj_name in zip(response.pose, data_keys['pose']): + object_data = { + "position": np.array( + [object.object_pose.position.x, object.object_pose.position.y, object.object_pose.position.z]), + "rotation": np.array( + [object.object_pose.rpy.rw, object.object_pose.rpy.rx, object.object_pose.rpy.ry, + object.object_pose.rpy.rz]) + } + object_datas[obj_name] = object_data + gripper_datas = { + "left": { 
+ "position": np.array( + [response.gripper.left_gripper.position.x, response.gripper.left_gripper.position.y, + response.gripper.left_gripper.position.z]), + "rotation": np.array([response.gripper.left_gripper.rpy.rw, response.gripper.left_gripper.rpy.rx, + response.gripper.left_gripper.rpy.ry, response.gripper.left_gripper.rpy.rz]) + + }, + "right": { + "position": np.array( + [response.gripper.right_gripper.position.x, response.gripper.right_gripper.position.y, + response.gripper.right_gripper.position.z]), + "rotation": np.array([response.gripper.right_gripper.rpy.rw, response.gripper.right_gripper.rpy.rx, + response.gripper.right_gripper.rpy.ry, response.gripper.right_gripper.rpy.rz]) + }} + observation = { + "camera": camera_datas, + "joint": joint_datas, + "pose": object_datas, + "gripper": gripper_datas + } + return observation + + # client端启动开始录制 + def start_recording(self, data_keys, fps, task_name): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.GetObservationReq() + req.startRecording = True + req.fps = fps + req.task_name = task_name + if 'camera' in data_keys: + render_depth = data_keys['camera']['render_depth'] + render_semantic = data_keys['camera']['render_semantic'] + camera_prim_list = data_keys['camera']['camera_prim_list'] + req.isCam = True + req.CameraReq.render_depth = data_keys['camera']['render_depth'] + req.CameraReq.render_semantic = data_keys['camera']['render_semantic'] + for prim in data_keys['camera']['camera_prim_list']: + req.CameraReq.camera_prim_list.append(prim) + if data_keys['pose']: + req.isPose = True + for pose in data_keys['pose']: + req.objectPrims.append(pose) + req.isJoint = data_keys['joint_position'] + req.isGripper = data_keys['gripper'] + response = stub.GetObservation(req) + return (response) + + # client端启动结束录制 + def stop_recording(self): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = 
sim_observation_service_pb2.GetObservationReq() + req.stopRecording = True + response = stub.GetObservation(req) + return (response) + + def reset(self): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.ResetReq() + req.reset = True + response = stub.Reset(req) + return (response) + + def AttachObj(self, prim_paths, is_right=True): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.AttachReq() + req.is_right = is_right + for prim in prim_paths: + req.obj_prims.append(prim) + response = stub.AttachObj(req) + return (response) + + def DetachObj(self): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.DetachReq() + req.detach = True + response = stub.DetachObj(req) + return (response) + + def MultiPlan(self, robot_name, target_poses): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.MultiMoveReq() + req.plan = True + req.robot_name = robot_name + req.plans_index = 0 + for pose in target_poses: + _pose = sim_observation_service_pb2.SE3RpyPose() + _pose.position.x, _pose.position.y, _pose.position.z = pose[0] + _pose.rpy.rw, _pose.rpy.rx, _pose.rpy.ry, _pose.rpy.rz = pose[1] + req.poses.append(_pose) + response = stub.MultiMove(req) + return (response) + + def MultiMove(self, robot_name, cmd_plan): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.MultiMoveReq() + req.plan = False + req.robot_name = robot_name + req.cmd_plan = cmd_plan + response = stub.MultiMove(req) + return (response) + + def SendTaskStatus(self, isSuccess, fail_stage_step): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.TaskStatusReq() + for step in fail_stage_step: + req.failStep.append(step) + 
req.isSuccess = isSuccess + response = stub.TaskStatus(req) + return (response) + + def Exit(self): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.ExitReq() + req.exit = True + response = stub.Exit(req) + return (response) + + def GetEEPose(self, is_right): + stub = joint_channel_pb2_grpc.JointControlServiceStub(self.channel) + req = joint_channel_pb2.GetEEPoseReq() + req.is_right = is_right + response = stub.GetEEPose(req) + return response + + def GetIKStatus(self, target_poses, is_right, ObsAvoid=False): + stub = joint_channel_pb2_grpc.JointControlServiceStub(self.channel) + req = joint_channel_pb2.GetIKStatusReq() + req.is_right = is_right + req.ObsAvoid = ObsAvoid + urdf = os.path.dirname(os.path.abspath(__file__)) + "/robot_urdf/" + self.robot_urdf + model = pinocchio.buildModelFromUrdf(urdf) + data = model.createData() + joint_names = [] + for name in model.names: + joint_names.append(str(name)) + + for pose in target_poses: + _pose = sim_observation_service_pb2.SE3RpyPose() + _pose.position.x, _pose.position.y, _pose.position.z = pose["position"] + _pose.rpy.rw, _pose.rpy.rx, _pose.rpy.ry, _pose.rpy.rz = pose["rotation"] + req.target_pose.append(_pose) + response = stub.GetIKStatus(req) + result = [] + for ik_status in response.IKStatus: + joint_datas = {} + for name, position in zip(ik_status.joint_names, ik_status.joint_positions): + joint_datas[name] = position + + joint_positions = [] + for name in model.names: + if name == 'universe': + continue + if name not in joint_datas: + joint_positions.append(0) + else: + joint_positions.append(joint_datas[name]) + joint_positions = np.array(joint_positions) + + pinocchio.forwardKinematics(model, data, joint_positions) + if ("A2D" in self.robot_urdf): + J = pinocchio.computeJointJacobian(model, data, joint_positions, 24) + else: + J = pinocchio.computeJointJacobian(model, data, joint_positions, 7) + manip = np.sqrt(np.linalg.det(np.dot(J, 
J.T))) + result.append({ + "status": ik_status.isSuccess, + "Jacobian": manip, + "joint_positions": ik_status.joint_positions, + "joint_names": ik_status.joint_names + }) + return result + + def GetManipulability(self, joint_data): + urdf = os.path.dirname(os.path.abspath(__file__)) + "/robot_urdf/" + self.robot_urdf + model = pinocchio.buildModelFromUrdf(urdf) + data = model.createData() + joint_id = 24 + joint_positions = [] + for name in model.names: + if name == 'universe': + continue + # if name not in joint_data: + if name not in ['joint_lift_body', 'joint_body_pitch', 'Joint1_r', 'Joint2_r', 'Joint3_r', 'Joint4_r', + 'Joint5_r', 'Joint6_r', 'Joint7_r', 'right_Left_1_Joint', 'right_Left_0_Joint', + 'right_Left_Support_Joint', 'right_Right_1_Joint', 'right_Right_0_Joint', + 'right_Right_Support_Joint']: + joint_positions.append(0) + else: + joint_positions.append(joint_data[name]) + joint_positions = np.array(joint_positions) + print(joint_positions) + pinocchio.forwardKinematics(model, data, joint_positions) + J = pinocchio.computeFrameJacobian(model, data, joint_positions, joint_id) + manip = np.sqrt(np.linalg.det(np.dot(J, J.T))) + return manip + + def GetObjectsOfType(self, obj_type): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.GetObjectsOfTypeReq() + req.obj_type = obj_type + response = stub.GetObjectsOfType(req) + return (response) + + def AddCamera(self, camera_prim, camera_position, camera_rotation, width, height, focus_length, horizontal_aperture, + vertical_aperture, is_local): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.AddCameraReq() + req.camera_prim = camera_prim + req.camera_pose.position.x, req.camera_pose.position.y, req.camera_pose.position.z = camera_position + req.camera_pose.rpy.rw, req.camera_pose.rpy.rx, req.camera_pose.rpy.ry, req.camera_pose.rpy.rz = camera_rotation + req.focus_length = 
focus_length + req.horizontal_aperture = horizontal_aperture + req.vertical_aperture = vertical_aperture + req.width = width + req.height = height + req.is_local = is_local + response = stub.AddCamera(req) + return (response) + + def InitRobot(self, robot_cfg, robot_usd, scene_usd, init_position=[0, 0, 0], init_rotation=[1, 0, 0, 0], + stand_type="cylinder", stand_size_x=0.1, stand_size_y=0.1): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.InitRobotReq() + req.robot_cfg_file, req.robot_usd_path, req.scene_usd_path = robot_cfg, robot_usd, scene_usd + req.robot_pose.position.x, req.robot_pose.position.y, req.robot_pose.position.z = init_position + req.robot_pose.rpy.rw, req.robot_pose.rpy.rx, req.robot_pose.rpy.ry, req.robot_pose.rpy.rz = init_rotation + req.stand_type = stand_type + req.stand_size_x = stand_size_x + req.stand_size_y = stand_size_y + response = stub.InitRobot(req) + return (response) + + def DrawLine(self, point_list_1, point_list_2, colors, sizes, name): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.DrawLineReq() + for point in point_list_1: + _point = sim_observation_service_pb2.Vec3() + _point.x, _point.y, _point.z = point + req.point_list_1.append(_point) + for point in point_list_2: + _point = sim_observation_service_pb2.Vec3() + _point.x, _point.y, _point.z = point + req.point_list_2.append(_point) + for color in colors: + _color = sim_observation_service_pb2.Vec3() + _color.x, _color.y, _color.z = color + req.colors.append(_color) + for size in sizes: + req.sizes.append(size) + req.name = name + response = stub.DrawLine(req) + return (response) + + def ClearLine(self, name): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.ClearLineReq() + req.name = name + response = stub.ClearLine(req) + return (response) + + def SetObjectPose(self, 
object_info, joint_cmd, object_joint_info=None): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.SetObjectPoseReq() + for object in object_info: + object_pose = sim_observation_service_pb2.ObjectPose() + object_pose.prim_path = object["prim_path"] + object_pose.pose.position.x, object_pose.pose.position.y, object_pose.pose.position.z = object["position"] + object_pose.pose.rpy.rw, object_pose.pose.rpy.rx, object_pose.pose.rpy.ry, object_pose.pose.rpy.rz = object[ + "rotation"] + req.object_pose.append(object_pose) + for pos in joint_cmd: + cmd = sim_observation_service_pb2.JointCommand() + cmd.position = pos + req.joint_cmd.append(cmd) + if object_joint_info is not None: + for joint in object_joint_info: + object_joint = sim_observation_service_pb2.ObjectJoint() + object_joint.prim_path = joint["prim_path"] + for pos in joint["joint_cmd"]: + cmd = sim_observation_service_pb2.JointCommand() + cmd.position = pos + object_joint.joint_cmd.append(cmd) + req.object_joint.append(object_joint) + response = stub.SetObjectPose(req) + return (response) + + def SetCameraPose(self, camera_prim_name, camera_pose): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.SetCameraPoseReq() + req.camera_prim_name = camera_prim_name + req.camera_pose.position.x, req.camera_pose.position.y, req.camera_pose.position.z = camera_pose["position"] + req.camera_pose.rpy.rw, req.camera_pose.rpy.rx, req.camera_pose.rpy.ry, req.camera_pose.rpy.rz = camera_pose[ + "rotation"] + response = stub.SetCameraPose(req) + return (response) + + def SetTrajectoryList(self, trajectory_list, is_block=False): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.SetTrajectoryListReq() + req.is_block = is_block + for point in trajectory_list: + pose = sim_observation_service_pb2.SE3RpyPose() + pose.position.x, 
pose.position.y, pose.position.z = point[0] + pose.rpy.rw, pose.rpy.rx, pose.rpy.ry, pose.rpy.rz = point[1] + req.trajectory_point.append(pose) + response = stub.SetTrajectoryList(req) + return response + + def SetTargetPoint(self, position): + stub = sim_object_service_pb2_grpc.SimObjectServiceStub(self.channel) + req = sim_object_service_pb2.SetTargetPointReq() + req.point_position.x, req.point_position.y, req.point_position.z = position + response = stub.SetTargetPoint(req) + return response + + def SetMaterial(self, material_info): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.SetMaterailReq() + for mat in material_info: + print(mat) + mat_info = sim_observation_service_pb2.MaterialInfo() + mat_info.object_prim = mat["object_prim"] + mat_info.material_name = mat["material_name"] + mat_info.material_path = mat["material_path"] + if "label_name" in mat: + mat_info.label_name = mat["label_name"] + req.materials.append(mat_info) + response = stub.SetMaterial(req) + return (response) + + def SetLight(self, light_info): + stub = sim_observation_service_pb2_grpc.SimObservationServiceStub(self.channel) + req = sim_observation_service_pb2.SetLightReq() + for light in light_info: + print(light) + light_cfg = sim_observation_service_pb2.LightCfg() + light_cfg.light_type = light["light_type"] + light_cfg.light_prim = light["light_prim"] + light_cfg.light_temperature = light["light_temperature"] + light_cfg.light_intensity = light["light_intensity"] + light_cfg.light_rotation.rw, light_cfg.light_rotation.rx, light_cfg.light_rotation.ry, light_cfg.light_rotation.rz = \ + light["rotation"] + light_cfg.light_texture = light["texture"] + req.lights.append(light_cfg) + response = stub.SetLight(req) + return (response) + + +# 调用示例 +def run(): + rpc_client = Rpc_Client(client_host="localhost:50051") + try: + while True: + send_msg = input("input Command: ") + if send_msg == "30": + result = 
rpc_client.DrawLine( + point_list_1=[[np.random.uniform(-5, 5), np.random.uniform(-5, 5), np.random.uniform(-5, 5)]], + point_list_2=[[np.random.uniform(-5, 5), np.random.uniform(-5, 5), np.random.uniform(-5, 5)]], + colors=[[np.random.uniform(0, 1), np.random.uniform(0, 1), np.random.uniform(0, 1)]], + sizes=[np.random.uniform(0, 1)], + name="test" + ) + if send_msg == "3111": + result = rpc_client.ClearLine(name="test") + if send_msg == "point": + rpc_client.SetTargetPoint([0, 1, 1]) + if send_msg == "test_mat": + rpc_client.SetMaterial([{ + "object_prim": "/World/huxing/a_keting/yangbanjian_A_016", + "material_name": "Ash", + "material_path": "materials/wood/Ash" + }, + { + "object_prim": "/World/huxing/a_Base/Floor_003", + "material_name": "Ash", + "material_path": "materials/wood/Ash" + + }]) + rpc_client.SetLight([{ + "light_type": "Distant", + "light_prim": "/World/huxing/DistantLight", + "light_temperature": 2000, + "light_intensity": 1000, + "rotation": [1, 0.5, 0.5, 0.5], + "texture": "" + }, + { + "light_type": "Dome", + "light_prim": "/World/DomeLight", + "light_temperature": 6500, + "light_intensity": 1000, + "rotation": [1, 0, 0, 0], + "texture": "materials/hdri/abandoned_hall_01_4k.hdr" + }, + { + "light_type": "Rect", + "light_prim": "/World/RectLight", + "light_temperature": 4500, + "light_intensity": 1000, + "rotation": [1, 0, 0, 0], + "texture": "" + }, + { + "light_type": "Rect", + "light_prim": "/World/RectLight_01", + "light_temperature": 8500, + "light_intensity": 1000, + "rotation": [1, 0, 0, 0], + "texture": "" + }]) + + if send_msg == "908": + pose_1 = [1, 1, 1], [1, 0, 0, 0] + pose_2 = [1, 0, 1], [1, 0, 0, 0] + trajecory_list = [pose_1, pose_2] * 10 + result = rpc_client.SetTrajectoryList(trajecory_list, True) + if send_msg == "31": + target_poses = [] + for i in range(1): + # x = np.random.uniform(0.3, 0.7) + # y = np.random.uniform(-0.3, 0.3) + # z = np.random.uniform(0.96, 1.2) + # pose = { + # "position": [x,y,z], + # "rotation": 
[1,0,0,0] + # } + x = 0.57597359649470348 + y = -0.45669529659303565 + z = 1.0198275517174573 + rw = 0.066194084385260935 + rx = 0.70713063274436749 + ry = 0.70071350186850612 + rz = 0.0677141028599091 + pose = { + "position": [x, y, z], + "rotation": [rw, rx, ry, rz] + } + target_poses.append(pose) + + result = rpc_client.GetIKStatus(target_poses=target_poses, is_right=False) + print(result) + if send_msg == "1": + # 相机 + # A2W + rpc_client.capture_frame(camera_prim_path="/World/Raise_A2/base_link/Head_Camera") + result = rpc_client.capture_semantic_frame(camera_prim_path="/World/Raise_A2/base_link/Head_Camera") + print(result) + if send_msg == "2": + # 臂 + x = np.random.uniform(0.3, 0.7) + y = np.random.uniform(-0.3, 0) + z = np.random.uniform(0.1, 0.5) + rpc_client.moveto(target_position=[x, y, z], target_quaternion=np.array([0.0, -0.0, -1.0, 0.0]), + arm_name="left", is_backend=True, ee_interpolation=True) + if send_msg == "20": + result = rpc_client.MultiPlan(robot_name="left", target_poses=[ + [[0.5169683509992052, 0.1313259611510117, 1.1018942820728093], + [0.40020943, 0.57116637, 0.69704651, -0.16651593]], + [[0.5610938560120418, 0.048608636026916924, 1.0269891277236924], + [0.40020943, 0.57116637, 0.69704651, -0.16651593]], + [[0.5610938560120418, 0.048608636026916924, 1.2269891277236924], + [0.40020943, 0.57116637, 0.69704651, -0.16651593]] + ]) + print(result[0]) + + if send_msg == "44": + import time + # test cabinet + rpc_client.InitRobot("A2D_120s_horizon.json", "A2D.usd", "omnimanip_Simple_Room_01/simple_room.usd", + init_position=[-0.4, 0, -0.55]) + rpc_client.reset() + time.sleep(1) + rpc_client.DetachObj() + rpc_client.reset() + time.sleep(1) + rpc_client.add_object( + usd_path="objects/guanglun/storagefurniture/storagefurniture011/Storagefurniture011.usd", + prim_path="/World/Objects/storagefurniture", label_name="test_storagefurniture", + target_position=np.array([ + 0.64, + -0.3, + 0.40]), target_quaternion=np.array([0., 0., 0.70711, 0.70711]), 
+ target_scale=np.array([1, 1, 1]), color=np.array([1, 0, 1]), + material="Plastic", mass=0.01) + time.sleep(1) + rpc_client.set_gripper_state(gripper_command="open", is_right=True, opened_width=1) + + target_poses = [] + start_time = time.time() + for i in range(50): + x = np.random.uniform(0, 0.2) + y = np.random.uniform(-0.3, 0.3) + z = np.random.uniform(0.96, 1.2) + pose = { + "position": [x, y, z], + "rotation": [1, x, y, z] + } + target_poses.append(pose) + result = rpc_client.GetIKStatus(target_poses=target_poses, is_right=False) + result = rpc_client.GetIKStatus(target_poses=target_poses, is_right=True) + rpc_client.DetachObj() + print("open") + rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle1") + rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle2") + obj_pose = rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle3") + obj_xyz = np.array( + [obj_pose.object_pose.position.x, obj_pose.object_pose.position.y, obj_pose.object_pose.position.z]) + ee_pose = rpc_client.GetEEPose(is_right=True) + ee_quat = np.array( + [ee_pose.ee_pose.rpy.rw, ee_pose.ee_pose.rpy.rx, ee_pose.ee_pose.rpy.ry, ee_pose.ee_pose.rpy.rz]) + goal_xyz = obj_xyz.copy() + goal_xyz[0] = goal_xyz[0] + 0.3 + goal_xyz[2] = goal_xyz[2] + 0.55 + print(goal_xyz) + rpc_client.moveto( + target_position=goal_xyz, + target_quaternion=ee_quat, + arm_name="left", + is_backend=False, + ee_interpolation=False, + distance_frame=0.001) + obj_pose = rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle3") + obj_xyz = np.array( + [obj_pose.object_pose.position.x, obj_pose.object_pose.position.y, obj_pose.object_pose.position.z]) + ee_pose = rpc_client.GetEEPose(is_right=True) + ee_quat = np.array( + [ee_pose.ee_pose.rpy.rw, ee_pose.ee_pose.rpy.rx, ee_pose.ee_pose.rpy.ry, ee_pose.ee_pose.rpy.rz]) + goal_xyz = obj_xyz.copy() + 
rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle1") + rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle2") + rpc_client.moveto( + target_position=goal_xyz, + target_quaternion=ee_quat, + arm_name="left", + is_backend=True, + ee_interpolation=False, + distance_frame=0.001) + rpc_client.set_gripper_state(gripper_command="close", is_right=True, opened_width=0.02) + time.sleep(1) + obj_pose = rpc_client.get_object_pose("/World/Objects/storagefurniture/Storagefurniture011_Handle3") + obj_xyz = np.array( + [obj_pose.object_pose.position.x, obj_pose.object_pose.position.y, obj_pose.object_pose.position.z]) + ee_pose = rpc_client.GetEEPose(is_right=True) + ee_quat = np.array( + [ee_pose.ee_pose.rpy.rw, ee_pose.ee_pose.rpy.rx, ee_pose.ee_pose.rpy.ry, ee_pose.ee_pose.rpy.rz]) + goal_xyz = obj_xyz.copy() + goal_xyz[0] = goal_xyz[0] - 0.1 + rpc_client.moveto( + target_position=goal_xyz, + target_quaternion=ee_quat, + arm_name="left", + is_backend=True, + ee_interpolation=False, + distance_frame=0.001) + if send_msg == "21": + rpc_client.MultiMove(robot_name="left", plan_index=0) + if send_msg == "22": + rpc_client.MultiMove(robot_name="left", plan_index=1) + if send_msg == "23": + rpc_client.MultiMove(robot_name="left", plan_index=2) + if send_msg == "24": + rpc_client.start_recording() + + if send_msg == "3": + # 关节 + joint_states = rpc_client.get_joint_positions().states + joint_datas = {} + for joint in joint_states: + joint_datas[joint.name] = joint.position + result = rpc_client.GetManipulability(joint_datas) + print(result) + if send_msg == "4": + # 手 + rpc_client.set_gripper_state(gripper_command="open", is_right=True, opened_width=0.1) + if send_msg == "5": + # 物体 + + import time + # rpc_client.reset() + rpc_client.add_object(usd_path="Collected_cabinet_000/cabinet_000.usd", + prim_path="/World/Objects/cabinet", label_name="test_cabinet", + target_position=np.array([.8, 0., -0.1]), + 
target_quaternion=np.array([0., 0., 0.70711, 0.70711]), + target_scale=np.array([1, 1, 1]), color=np.array([1, 0, 1]), + material="Plastic", mass=0.01) + time.sleep(1) + result = rpc_client.get_object_joint("/World/Objects/cabinet") + print(result) + # x = np.random.uniform(0.2, 0.64) + # y = np.random.uniform(-0.3, 0.3) + # rpc_client.add_object(usd_path="genie3D/09.usd", + # prim_path="/World/Objects/object8", label_name="test7", + # target_position=np.array([np.random.uniform(0.2, 0.64),np.random.uniform(-0.3, 0.3),1.2]), target_quaternion=np.array([0.0,-0.0,-1.0, 0.0]), target_scale=np.array([0.01,0.01,0.01]), color=np.array([1,1,0]), material="Brass", mass=1) + + # rpc_client.add_object(usd_path="genie3D/02/02.usd", + # prim_path="/World/Objects/object2", label_name="test2", + # target_position=np.array([np.random.uniform(0.2, 0.64), np.random.uniform(-0.3, 0.3),1.2]), target_quaternion=np.array([0.0,-0.0,-1.0, 0.0]), target_scale=np.array([0.01,0.01,0.01]), color=np.array([1,1,1])) + # rpc_client.get_object_pose(prim_path="/World/Xform/box_place") + + # rpc_client.add_object(usd_path="genie3D/06/06.usd", + # prim_path="/World/Objects/object6", label_name="test6", + # target_position=np.array([np.random.uniform(0.2, 0.64), np.random.uniform(-0.3, 0.3),1.2]), target_quaternion=np.array([0.0,-0.0,-1.0, 0.0]), target_scale=np.array([0.01,0.01,0.01]), color=np.array([1,0,0])) + # rpc_client.get_object_pose(prim_path="/World/Xform/box_pick_00") + + # rpc_client.add_object(usd_path="genie3D/01/01.usd", + # prim_path="/World/Objects/object1", label_name="test1", + # target_position=np.array([np.random.uniform(0.2, 0.64), np.random.uniform(-0.3, 0.3),1.2]), target_quaternion=np.array([0.0,-0.0,-1.0, 0.0]), target_scale=np.array([0.01,0.01,0.01]), color=np.array([0,0,1])) + # result = rpc_client.get_object_pose(prim_path="/World/Raise_A2_W_T1/head_link/visuals") + if send_msg == "6": + # 获取全部信息 + rpc_client.get_joint_positions() + if send_msg == "7": + 
rpc_client.set_gripper_state(gripper_command="close", is_right=True, opened_width=0.02) + if send_msg == "8": + result = rpc_client.get_observation( + data_keys={ + 'camera': { + 'camera_prim_list': [ + '/World/Raise_A2/base_link/Head_Camera' + ], + 'render_depth': True, + 'render_semantic': True + }, + 'pose': [ + '/World/Raise_A2/base_link/Head_Camera' + ], + 'joint_position': True, + 'gripper': True + } + ) + print(result["camera"]) + if send_msg == "9": + rpc_client.start_recording(data_keys={ + 'camera': { + 'camera_prim_list': [ + '/World/Raise_A2/base_link/Head_Camera' + ], + 'render_depth': True, + 'render_semantic': True + }, + 'pose': [ + '/World/Raise_A2/base_link/Head_Camera' + ], + 'joint_position': True, + 'gripper': True + }, fps=30, task_name="test" + + ) + time.sleep(1) + rpc_client.stop_recording() + rpc_client.SendTaskStatus(False) + if send_msg == "10": + rpc_client.stop_recording() + if send_msg == "11": + rpc_client.reset() + if send_msg == "13": + result = rpc_client.Exit() + print(result) + + if send_msg == '112': + rpc_client.InitRobot(robot_cfg="Franka.json", robot_usd="Franka/franka.usd", + scene_usd="Pick_Place_Franka_Yellow_Table.usd") + + if send_msg == '113': + position = [1.2, 0., 1.2] + rotation = [0.61237, 0.35355, 0.35355, 0.61237] + rotation = [0.65328, 0.2706, 0.2706, 0.65328] + width = 640 + height = 480 + rpc_client.AddCamera("/World/Sensors/Head_Camera", position, rotation, width, height, 18.14756, 20.955, + 15.2908, False) + response = rpc_client.capture_frame(camera_prim_path="/World/Sensors/Head_Camera") + # cam_info + cam_info = { + "W": response.color_info.width, + "H": response.color_info.height, + "K": np.array( + [ + [response.color_info.fx, 0, response.color_info.ppx], + [0, response.color_info.fy, response.color_info.ppy], + [0, 0, 1], + ] + ), + "scale": 1, + } + # rgb + # rgb = np.frombuffer(response.color_image.data, dtype=np.uint8).reshape(cam_info["H"], cam_info["W"], 4)[ + # :, :, :3 + # ] + # import cv2 + # 
def generate_data(task_file, client_host, use_recording=True):
    """Run one data-generation episode described by a task JSON file.

    Args:
        task_file: Path to a task description JSON. The code reads the keys
            ``robot`` (with ``robot_cfg``, ``stand``, ``init_position``,
            ``init_rotation``), ``scene_usd``, ``recording_setting`` and
            ``scene.function_space_objects``.
        client_host: Host address handed to the Isaac Sim RPC robot client.
        use_recording: Forwarded to ``Agent.run`` to toggle trajectory recording.

    Side effects: connects to the simulator, runs the agent, then asks the
    RPC client to exit.
    """
    # Context manager: the original `json.load(open(task_file))` leaked the
    # file handle.
    with open(task_file, "r", encoding="utf-8") as f:
        task = json.load(f)

    robot_spec = task["robot"]
    stand = robot_spec["stand"]
    robot = IsaacSimRpcRobot(robot_cfg=robot_spec["robot_cfg"],
                             scene_usd=task["scene_usd"],
                             client_host=client_host,
                             position=robot_spec["init_position"],
                             rotation=robot_spec["init_rotation"],
                             stand_type=stand["stand_type"],
                             stand_size_x=stand["stand_size_x"],
                             stand_size_y=stand["stand_size_y"])
    agent = Agent(robot)

    recording = task["recording_setting"]
    # Semantic rendering is optional in the task schema; default to off.
    render_semantic = recording.get("render_semantic", False)
    agent.run(task_file=task_file,
              camera_list=recording["camera_list"],
              use_recording=use_recording,
              workspaces=task["scene"]["function_space_objects"],
              fps=recording["fps"],
              render_semantic=render_semantic,
              )
    print("job done")
    robot.client.Exit()
def pose_difference(pose1, pose2):
    """Measure how far apart two 4x4 homogeneous poses are.

    Returns a tuple ``(translation_distance, rotation_angle_degrees)`` where
    the first element is the Euclidean gap between the positions and the
    second is the geodesic angle between the two orientations.
    """
    # Positional gap: plain Euclidean norm of the translation difference.
    translation_gap = np.linalg.norm(pose1[:3, 3] - pose2[:3, 3])

    # Orientation gap: magnitude of the relative rotation r1^-1 * r2.
    relative = R.from_matrix(pose1[:3, :3]).inv() * R.from_matrix(pose2[:3, :3])
    return translation_gap, np.degrees(relative.magnitude())


def interpolate_rotation_matrices(rot_matrix1, rot_matrix2, num_interpolations):
    """Slerp between two rotation matrices.

    Returns ``num_interpolations`` rotation matrices evenly spaced (in
    quaternion space) from ``rot_matrix1`` to ``rot_matrix2``, endpoints
    included.
    """
    endpoints = R.from_quat([R.from_matrix(rot_matrix1).as_quat(),
                             R.from_matrix(rot_matrix2).as_quat()])
    slerp = Slerp([0, 1], endpoints)
    sample_times = np.linspace(0.0, 1.0, num_interpolations)
    return slerp(sample_times).as_matrix()
def is_local_axis_facing_world_axis(pose_matrix, local_axis='y', world_axis='z'):
    """Return True when the object's chosen local axis has a positive
    projection onto the chosen world axis.

    pose_matrix: 4x4 homogeneous transform; column ``i`` of its rotation part
    is the local axis ``i`` expressed in world coordinates.
    """
    column = {'x': 0, 'y': 1, 'z': 2}[local_axis]
    world_vec = {'x': np.array([1, 0, 0]),
                 'y': np.array([0, 1, 0]),
                 'z': np.array([0, 0, 1])}[world_axis]
    return np.dot(pose_matrix[:3, column], world_vec) > 0


def is_y_axis_up(pose_matrix):
    """Return True when the object's local +Y axis points along world +Y."""
    # Same projection test as the general helper, specialised to y-vs-y.
    return is_local_axis_facing_world_axis(pose_matrix, local_axis='y', world_axis='y')


def _rotation_180(rot_axis):
    """3x3 matrix rotating 180 degrees about a principal axis ('x'/'y'/'z')."""
    mats = {
        'x': np.diag([1, -1, -1]),
        'y': np.diag([-1, 1, -1]),
        'z': np.diag([-1, -1, 1]),
    }
    assert rot_axis in mats, "Invalid rotation axis. Please choose from 'x', 'y', 'z'."
    return mats[rot_axis]


def _geodesic_angle(rot_a, rot_b):
    """Geodesic distance (radians) between two rotation matrices.

    BUGFIX: the original computed ``arccos(trace(Vh) / 2)`` from an SVD of
    ``R1.T @ R2`` — that is not a rotation metric, and its argument can leave
    arccos's [-1, 1] domain (identity vs identity gave NaN, so the comparison
    below silently picked the wrong branch). The standard SO(3) distance is
    ``arccos((trace(R1ᵀR2) - 1) / 2)``, clipped for numerical safety.
    """
    cos_theta = (np.trace(rot_a.T @ rot_b) - 1.0) / 2.0
    return np.arccos(np.clip(cos_theta, -1.0, 1.0))


def fix_gripper_rotation(source_affine, target_affine, rot_axis='z'):
    """Pick the less-rotated of two equivalent gripper targets.

    The gripper is symmetric: rotating the target 180 degrees about
    ``rot_axis`` yields an equivalent grasp. Return whichever of
    ``target_affine`` / its flipped twin is closer (in rotation) to
    ``source_affine``, to avoid unnecessary wrist motion.
    """
    flip = _rotation_180(rot_axis)
    source_rotation = source_affine[:3, :3]
    target_rotation = target_affine[:3, :3]
    target_rotation_flipped = target_rotation @ flip

    if _geodesic_angle(source_rotation, target_rotation) < \
            _geodesic_angle(source_rotation, target_rotation_flipped):
        return target_affine

    flipped_affine = np.eye(4)
    flipped_affine[:3, :3] = target_rotation_flipped
    flipped_affine[:3, 3] = target_affine[:3, 3]  # keep the original translation
    return flipped_affine


def rotate_180_along_axis(pose, rot_axis='z'):
    """Rotate pose(s) 180 degrees about a local principal axis.

    Accepts a single 4x4 pose or a stack (N, 4, 4) and returns the same shape.
    BUGFIX: the original modified the caller's array in place (the
    ``np.newaxis`` path is a view); this version works on a copy.
    """
    pose = np.array(pose, copy=True)
    single = pose.ndim == 2
    batch = pose[np.newaxis] if single else pose
    batch[:, :3, :3] = batch[:, :3, :3] @ _rotation_180(rot_axis)
    return batch[0] if single else batch


def translate_along_axis(pose, shift, axis='z', use_local=True):
    """Translate pose(s) by ``shift`` along one principal axis.

    - pose: 4x4 matrix or stack (N, 4, 4); the input is not modified.
    - use_local: when True the shift is along the pose's own axis, otherwise
      along the world axis. An unknown ``axis`` leaves the pose unchanged
      (preserved from the original implementation).
    """
    pose = pose.copy()
    translation = np.zeros(3)
    axis_index = {'x': 0, 'y': 1, 'z': 2}.get(axis)
    if axis_index is not None:
        translation[axis_index] = shift

    batch = pose[np.newaxis] if pose.ndim == 2 else pose
    for single_pose in batch:
        if use_local:
            single_pose[:3, 3] += single_pose[:3, :3] @ translation
        else:
            single_pose[:3, 3] += translation
    return pose


def rotate_along_axis(target_affine, angle_degrees, rot_axis='z', use_local=True):
    """Return ``target_affine`` rotated by ``angle_degrees`` about one axis.

    - rot_axis: 'x', 'y' or 'z'.
    - use_local: rotate about the pose's own axis (post-multiply) when True,
      about the world axis (pre-multiply) otherwise.
    The translation is preserved; a new matrix is returned.
    """
    axis_index = {'x': 0, 'y': 1, 'z': 2}
    if rot_axis not in axis_index:
        raise ValueError("Invalid rotation axis. Please choose from 'x', 'y', 'z'.")

    rotation_vector = np.zeros(3)
    rotation_vector[axis_index[rot_axis]] = np.deg2rad(angle_degrees)
    R_angle = R.from_rotvec(rotation_vector).as_matrix()

    target_rotation = target_affine[:3, :3]
    rotated = target_rotation @ R_angle if use_local else R_angle @ target_rotation

    result = np.eye(4)
    result[:3, :3] = rotated
    result[:3, 3] = target_affine[:3, 3]  # keep the original translation
    return result


def rotation_matrix_to_quaternion(R):
    """Convert a 3x3 rotation matrix to a (w, x, y, z) quaternion.

    Shepperd's method: branch on the largest diagonal term for numerical
    stability.
    """
    assert R.shape == (3, 3)

    trace = np.trace(R)
    if trace > 0:
        S = np.sqrt(trace + 1.0) * 2  # S = 4*qw
        qw = 0.25 * S
        qx = (R[2, 1] - R[1, 2]) / S
        qy = (R[0, 2] - R[2, 0]) / S
        qz = (R[1, 0] - R[0, 1]) / S
    elif (R[0, 0] > R[1, 1]) and (R[0, 0] > R[2, 2]):
        S = np.sqrt(1.0 + R[0, 0] - R[1, 1] - R[2, 2]) * 2  # S = 4*qx
        qw = (R[2, 1] - R[1, 2]) / S
        qx = 0.25 * S
        qy = (R[0, 1] + R[1, 0]) / S
        qz = (R[0, 2] + R[2, 0]) / S
    elif R[1, 1] > R[2, 2]:
        S = np.sqrt(1.0 + R[1, 1] - R[0, 0] - R[2, 2]) * 2  # S = 4*qy
        qw = (R[0, 2] - R[2, 0]) / S
        qx = (R[0, 1] + R[1, 0]) / S
        qy = 0.25 * S
        qz = (R[1, 2] + R[2, 1]) / S
    else:
        S = np.sqrt(1.0 + R[2, 2] - R[0, 0] - R[1, 1]) * 2  # S = 4*qz
        qw = (R[1, 0] - R[0, 1]) / S
        qx = (R[0, 2] + R[2, 0]) / S
        qy = (R[1, 2] + R[2, 1]) / S
        qz = 0.25 * S

    return np.array([qw, qx, qy, qz])


def quat_wxyz_to_rotation_matrix(quat):
    """Convert a (w, x, y, z) quaternion to a 3x3 rotation matrix."""
    qw, qx, qy, qz = quat
    return np.array([
        [1 - 2 * qy ** 2 - 2 * qz ** 2, 2 * qx * qy - 2 * qz * qw, 2 * qx * qz + 2 * qy * qw],
        [2 * qx * qy + 2 * qz * qw, 1 - 2 * qx ** 2 - 2 * qz ** 2, 2 * qy * qz - 2 * qx * qw],
        [2 * qx * qz - 2 * qy * qw, 2 * qy * qz + 2 * qx * qw, 1 - 2 * qx ** 2 - 2 * qy ** 2]
    ])


def estimate_affine_3d_fixed_scale(src_points, dst_points):
    """Estimate a similarity transform (uniform scale + rotation + translation)
    mapping ``src_points`` onto ``dst_points`` (both (N, 3)).

    RANSAC rejects outlier correspondences, then a Procrustes/SVD fit on the
    inliers gives the rotation and a single uniform scale.
    Requires sklearn's ``RANSACRegressor`` (imported at module top).
    """
    ransac = RANSACRegressor()
    ransac.fit(src_points, dst_points)
    inlier_mask = ransac.inlier_mask_

    src = src_points[inlier_mask]
    dst = dst_points[inlier_mask]

    # Center both point sets so the fit only has to explain rotation + scale.
    src_mean = np.mean(src, axis=0)
    dst_mean = np.mean(dst, axis=0)
    src_centered = src - src_mean
    dst_centered = dst - dst_mean

    # Rotation via SVD of the cross-covariance (Kabsch).
    U, _, Vt = np.linalg.svd(np.dot(dst_centered.T, src_centered))
    R_est = np.dot(U, Vt)

    # Ensure a proper rotation (det = +1), not a reflection.
    if np.linalg.det(R_est) < 0:
        Vt[2, :] *= -1
        R_est = np.dot(U, Vt)

    # Single uniform scale minimising the residual.
    scale = np.sum(dst_centered * (R_est @ src_centered.T).T) / np.sum(src_centered ** 2)

    transform = np.eye(4)
    transform[:3, :3] = scale * R_est
    transform[:3, 3] = dst_mean - scale * R_est @ src_mean
    return transform
src = src_points[inlier_mask] + dst = dst_points[inlier_mask] + + # Normalize the input points to ensure uniform scaling + src_mean = np.mean(src, axis=0) + dst_mean = np.mean(dst, axis=0) + src_centered = src - src_mean + dst_centered = dst - dst_mean + + # Estimate rotation using singular value decomposition (SVD) + U, _, Vt = np.linalg.svd(np.dot(dst_centered.T, src_centered)) + R_est = np.dot(U, Vt) + + # Ensure a right-handed coordinate system + if np.linalg.det(R_est) < 0: + Vt[2, :] *= -1 + R_est = np.dot(U, Vt) + + # Calculate the uniform scale + scale = np.sum(dst_centered * (R_est @ src_centered.T).T) / np.sum(src_centered ** 2) + + # Construct the affine transformation matrix + transform = np.eye(4) + transform[:3, :3] = scale * R_est + transform[:3, 3] = dst_mean - scale * R_est @ src_mean + + return transform \ No newline at end of file diff --git a/data_gen_dependencies/gripper_width_120s_fixed.json b/data_gen_dependencies/gripper_width_120s_fixed.json new file mode 100644 index 0000000..8dc3fdb --- /dev/null +++ b/data_gen_dependencies/gripper_width_120s_fixed.json @@ -0,0 +1,502 @@ +[ + { + "angel": 0.0, + "width": 0.0026608705520629883, + "depth": 0.01770174503326416 + }, + { + "angel": 0.01, + "width": 0.0039532482624053955, + "depth": 0.01782834529876709 + }, + { + "angel": 0.02, + "width": 0.005013704299926758, + "depth": 0.017954230308532715 + }, + { + "angel": 0.03, + "width": 0.00616687536239624, + "depth": 0.01803898811340332 + }, + { + "angel": 0.04, + "width": 0.007292896509170532, + "depth": 0.018111586570739746 + }, + { + "angel": 0.05, + "width": 0.008423537015914917, + "depth": 0.018180370330810547 + }, + { + "angel": 0.06, + "width": 0.009554654359817505, + "depth": 0.018243789672851562 + }, + { + "angel": 0.07, + "width": 0.010687708854675293, + "depth": 0.01830136775970459 + }, + { + "angel": 0.08, + "width": 0.01182129979133606, + "depth": 0.01835334300994873 + }, + { + "angel": 0.09, + "width": 0.01295548677444458, + "depth": 
0.018399715423583984 + }, + { + "angel": 0.1, + "width": 0.014090895652770996, + "depth": 0.018440604209899902 + }, + { + "angel": 0.11, + "width": 0.015226364135742188, + "depth": 0.018476247787475586 + }, + { + "angel": 0.12, + "width": 0.016361743211746216, + "depth": 0.01850605010986328 + }, + { + "angel": 0.13, + "width": 0.0174979567527771, + "depth": 0.018530607223510742 + }, + { + "angel": 0.14, + "width": 0.018633395433425903, + "depth": 0.018549561500549316 + }, + { + "angel": 0.15, + "width": 0.01976907253265381, + "depth": 0.018563032150268555 + }, + { + "angel": 0.16, + "width": 0.02090352773666382, + "depth": 0.018570780754089355 + }, + { + "angel": 0.17, + "width": 0.022038191556930542, + "depth": 0.018573284149169922 + }, + { + "angel": 0.18, + "width": 0.023171991109848022, + "depth": 0.01857006549835205 + }, + { + "angel": 0.19, + "width": 0.024304866790771484, + "depth": 0.01856088638305664 + }, + { + "angel": 0.2, + "width": 0.02543732523918152, + "depth": 0.018546223640441895 + }, + { + "angel": 0.21, + "width": 0.026568621397018433, + "depth": 0.01852583885192871 + }, + { + "angel": 0.22, + "width": 0.027698159217834473, + "depth": 0.018499255180358887 + }, + { + "angel": 0.23, + "width": 0.02882552146911621, + "depth": 0.018466830253601074 + }, + { + "angel": 0.24, + "width": 0.02995222806930542, + "depth": 0.018428564071655273 + }, + { + "angel": 0.25, + "width": 0.031076818704605103, + "depth": 0.018383502960205078 + }, + { + "angel": 0.26, + "width": 0.03220018744468689, + "depth": 0.01833212375640869 + }, + { + "angel": 0.27, + "width": 0.03332057595252991, + "depth": 0.018274307250976562 + }, + { + "angel": 0.28, + "width": 0.034440696239471436, + "depth": 0.01820969581604004 + }, + { + "angel": 0.29, + "width": 0.03555762767791748, + "depth": 0.01813817024230957 + }, + { + "angel": 0.3, + "width": 0.03667446970939636, + "depth": 0.018059611320495605 + }, + { + "angel": 0.31, + "width": 0.03778964281082153, + "depth": 
0.017974019050598145 + }, + { + "angel": 0.32, + "width": 0.0389028936624527, + "depth": 0.017881393432617188 + }, + { + "angel": 0.33, + "width": 0.040014833211898804, + "depth": 0.017781853675842285 + }, + { + "angel": 0.34, + "width": 0.04112696647644043, + "depth": 0.01767587661743164 + }, + { + "angel": 0.35, + "width": 0.04223787784576416, + "depth": 0.0175631046295166 + }, + { + "angel": 0.36, + "width": 0.0433482825756073, + "depth": 0.017444252967834473 + }, + { + "angel": 0.37, + "width": 0.04445780813694, + "depth": 0.017319321632385254 + }, + { + "angel": 0.38, + "width": 0.04556658864021301, + "depth": 0.017188549041748047 + }, + { + "angel": 0.39, + "width": 0.04667462408542633, + "depth": 0.017052769660949707 + }, + { + "angel": 0.4, + "width": 0.04778085649013519, + "depth": 0.016911983489990234 + }, + { + "angel": 0.41, + "width": 0.048885777592659, + "depth": 0.01676619052886963 + }, + { + "angel": 0.42, + "width": 0.04998904466629028, + "depth": 0.016615867614746094 + }, + { + "angel": 0.43, + "width": 0.05108833312988281, + "depth": 0.016460537910461426 + }, + { + "angel": 0.44, + "width": 0.052185386419296265, + "depth": 0.01630115509033203 + }, + { + "angel": 0.45, + "width": 0.05327913165092468, + "depth": 0.016136527061462402 + }, + { + "angel": 0.46, + "width": 0.05436857044696808, + "depth": 0.015967607498168945 + }, + { + "angel": 0.47, + "width": 0.05545388162136078, + "depth": 0.015793800354003906 + }, + { + "angel": 0.48, + "width": 0.056535109877586365, + "depth": 0.015615224838256836 + }, + { + "angel": 0.49, + "width": 0.057611629366874695, + "depth": 0.015431761741638184 + }, + { + "angel": 0.5, + "width": 0.058684349060058594, + "depth": 0.015242815017700195 + }, + { + "angel": 0.51, + "width": 0.05975167453289032, + "depth": 0.015048980712890625 + }, + { + "angel": 0.52, + "width": 0.06081554293632507, + "depth": 0.014850020408630371 + }, + { + "angel": 0.53, + "width": 0.061873987317085266, + "depth": 0.014646053314208984 + }, + 
{ + "angel": 0.54, + "width": 0.06292988359928131, + "depth": 0.01443624496459961 + }, + { + "angel": 0.55, + "width": 0.06398084759712219, + "depth": 0.014221429824829102 + }, + { + "angel": 0.56, + "width": 0.06502681970596313, + "depth": 0.014001250267028809 + }, + { + "angel": 0.57, + "width": 0.06606826186180115, + "depth": 0.01377558708190918 + }, + { + "angel": 0.58, + "width": 0.06710587441921234, + "depth": 0.013544917106628418 + }, + { + "angel": 0.59, + "width": 0.06813894212245941, + "depth": 0.01330876350402832 + }, + { + "angel": 0.6, + "width": 0.06916701793670654, + "depth": 0.013067483901977539 + }, + { + "angel": 0.61, + "width": 0.0701913833618164, + "depth": 0.012820601463317871 + }, + { + "angel": 0.62, + "width": 0.0712103396654129, + "depth": 0.012568473815917969 + }, + { + "angel": 0.63, + "width": 0.07222408056259155, + "depth": 0.012310981750488281 + }, + { + "angel": 0.64, + "width": 0.07323294878005981, + "depth": 0.012048125267028809 + }, + { + "angel": 0.65, + "width": 0.07423663139343262, + "depth": 0.011780023574829102 + }, + { + "angel": 0.66, + "width": 0.07523514330387115, + "depth": 0.011506795883178711 + }, + { + "angel": 0.67, + "width": 0.07622846961021423, + "depth": 0.011228084564208984 + }, + { + "angel": 0.68, + "width": 0.07721598446369171, + "depth": 0.010944008827209473 + }, + { + "angel": 0.69, + "width": 0.07819811999797821, + "depth": 0.01065516471862793 + }, + { + "angel": 0.7, + "width": 0.07917441427707672, + "depth": 0.010360479354858398 + }, + { + "angel": 0.71, + "width": 0.08014515042304993, + "depth": 0.010060548782348633 + }, + { + "angel": 0.72, + "width": 0.08111031353473663, + "depth": 0.009755849838256836 + }, + { + "angel": 0.73, + "width": 0.0820685476064682, + "depth": 0.009445548057556152 + }, + { + "angel": 0.74, + "width": 0.08302123844623566, + "depth": 0.009129881858825684 + }, + { + "angel": 0.75, + "width": 0.0839679092168808, + "depth": 0.008809328079223633 + }, + { + "angel": 0.76, + "width": 
0.08490841090679169, + "depth": 0.008483290672302246 + }, + { + "angel": 0.77, + "width": 0.08584237098693848, + "depth": 0.008152365684509277 + }, + { + "angel": 0.78, + "width": 0.08676967024803162, + "depth": 0.007816195487976074 + }, + { + "angel": 0.79, + "width": 0.08769077062606812, + "depth": 0.007474780082702637 + }, + { + "angel": 0.8, + "width": 0.08860553801059723, + "depth": 0.007128596305847168 + }, + { + "angel": 0.81, + "width": 0.08951263129711151, + "depth": 0.006777524948120117 + }, + { + "angel": 0.82, + "width": 0.09041430056095123, + "depth": 0.006421804428100586 + }, + { + "angel": 0.83, + "width": 0.09130872786045074, + "depth": 0.006061434745788574 + }, + { + "angel": 0.84, + "width": 0.09219725430011749, + "depth": 0.005696535110473633 + }, + { + "angel": 0.85, + "width": 0.0930781364440918, + "depth": 0.005327463150024414 + }, + { + "angel": 0.86, + "width": 0.0939527302980423, + "depth": 0.0049550533294677734 + }, + { + "angel": 0.87, + "width": 0.09482064843177795, + "depth": 0.004578828811645508 + }, + { + "angel": 0.88, + "width": 0.09568147361278534, + "depth": 0.004199862480163574 + }, + { + "angel": 0.89, + "width": 0.09653639793395996, + "depth": 0.0038175582885742188 + }, + { + "angel": 0.9, + "width": 0.09738385677337646, + "depth": 0.003433823585510254 + }, + { + "angel": 0.91, + "width": 0.09822401404380798, + "depth": 0.0030478239059448242 + }, + { + "angel": 0.92, + "width": 0.09905855357646942, + "depth": 0.0026608705520629883 + }, + { + "angel": 0.93, + "width": 0.09988570213317871, + "depth": 0.0022735595703125 + }, + { + "angel": 0.94, + "width": 0.10070507228374481, + "depth": 0.0018857717514038086 + }, + { + "angel": 0.95, + "width": 0.10151737928390503, + "depth": 0.0014983415603637695 + }, + { + "angel": 0.96, + "width": 0.10232140123844147, + "depth": 0.0011110305786132812 + }, + { + "angel": 0.97, + "width": 0.10311749577522278, + "depth": 0.0007239580154418945 + }, + { + "angel": 0.98, + "width": 
0.10392564535140991, + "depth": 0.00033295154571533203 + }, + { + "angel": 0.99, + "width": 0.10468029975891113, + "depth": 0.0 + } +] \ No newline at end of file diff --git a/data_gen_dependencies/manip_solver.py b/data_gen_dependencies/manip_solver.py new file mode 100644 index 0000000..4b40884 --- /dev/null +++ b/data_gen_dependencies/manip_solver.py @@ -0,0 +1,657 @@ +# -*- coding: utf-8 -*- +import copy +import json +import os +import time + +import trimesh + +curPath = os.path.abspath(os.path.dirname(__file__)) +rootPath = os.path.split(curPath)[0] +import numpy as np + +from data_gen_dependencies.transforms import calculate_rotation_matrix, rotate_around_axis +from data_gen_dependencies.object import OmniObject, transform_coordinates_3d +from data_gen_dependencies.fix_rotation import rotate_along_axis + +from data_gen_dependencies.action import build_stage + +from scipy.interpolate import interp1d +from scipy.spatial.transform import Rotation as R + + +def draw_axis(robot, pose): + if pose.shape[0] == 4 and pose.shape[1] == 4: + pose = np.concatenate([pose[:3, 3], R.from_matrix(pose[:3, :3]).as_euler('xyz')]) + + def generate_orthogonal_lines(point, angles, scale): + assert len(point) == 3 and len(angles) == 3 + + # Define rotation matrices for each axis + Rx = np.array([[1, 0, 0], + [0, np.cos(angles[0]), -np.sin(angles[0])], + [0, np.sin(angles[0]), np.cos(angles[0])]]) + + Ry = np.array([[np.cos(angles[1]), 0, np.sin(angles[1])], + [0, 1, 0], + [-np.sin(angles[1]), 0, np.cos(angles[1])]]) + + Rz = np.array([[np.cos(angles[2]), -np.sin(angles[2]), 0], + [np.sin(angles[2]), np.cos(angles[2]), 0], + [0, 0, 1]]) + + # Combined rotation matrix + R = Rz @ Ry @ Rx + + # Define the unit vectors for the axes + unit_vectors = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) + + # Rotate the unit vectors + rotated_vectors = R @ unit_vectors.T + + # Draw the lines + lines = [] + for vec in rotated_vectors.T: + start_point = point + end_point = point + vec * scale + 
def load_gripper_width_interp_func(file):
    """Build a width -> fingertip-depth interpolator from a calibration table.

    ``file`` is a JSON list of frames with keys "angel" (sic: joint angle),
    "width" and "depth"; only width/depth are used for the mapping. Returns a
    linear ``interp1d`` that extrapolates outside the calibrated range.
    """
    # Context manager: the original `json.load(open(file, 'r'))` leaked the
    # handle; the unused "digits" (angle) column is no longer collected.
    with open(file, 'r') as fh:
        frames = json.load(fh)
    widths = np.array([frame["width"] for frame in frames])
    depths = np.array([frame["depth"] for frame in frames])
    return interp1d(widths, depths, kind="linear", fill_value="extrapolate")


# Width -> depth lookup for the 120s gripper, built once at import time from
# the calibration file shipped next to this module.
func_width2depth = load_gripper_width_interp_func(os.path.dirname(__file__) + "/gripper_width_120s_fixed.json")


def format_object(obj, distance, type='active'):
    """Resolve an object's contact annotation into an approach segment.

    Uses ``obj.xyz`` / ``obj.direction`` to build a start/end point pair
    ``distance`` apart in the object frame: an 'active' object extends
    forward from its contact point, a 'passive'/'plane' object extends
    backward to it. Returns None when ``obj`` is None.

    Side effect: stores the inverse part->object transform on
    ``obj.obj2part``. (``type`` shadows the builtin but is kept — callers
    pass it by keyword.)
    """
    if obj is None:
        return None
    xyz, direction = obj.xyz, obj.direction
    # Scale the unit approach direction to the requested distance.
    direction = direction / np.linalg.norm(direction) * distance
    kind = type.lower()
    if kind == 'active':
        xyz_start = xyz
        xyz_end = xyz_start + direction
    elif kind == 'passive' or kind == 'plane':
        xyz_end = xyz
        xyz_start = xyz_end - direction
    else:
        # The original fell through to a NameError here; fail clearly instead.
        raise ValueError("type must be 'active', 'passive' or 'plane'")

    part2obj = np.eye(4)
    part2obj[:3, 3] = xyz_start
    obj.obj2part = np.linalg.inv(part2obj)

    return {
        'pose': obj.obj_pose,
        'length': obj.obj_length,
        'xyz_start': xyz_start,
        'xyz_end': xyz_end,
        'obj2part': obj.obj2part,
    }


def obj2world(obj_info):
    """Lift the object-frame approach segment from ``format_object`` into
    world coordinates, adding a normalized world-frame 'direction'."""
    obj_pose = obj_info['pose']

    arrow_in_obj = np.array([obj_info['xyz_start'], obj_info['xyz_end']]).transpose(1, 0)
    # transform_coordinates_3d comes from data_gen_dependencies.object.
    xyz_start_world, xyz_end_world = transform_coordinates_3d(arrow_in_obj, obj_pose).transpose(1, 0)

    direction_world = xyz_end_world - xyz_start_world
    direction_world = direction_world / np.linalg.norm(direction_world)

    return {
        'pose': obj_pose,
        'length': obj_info['length'],
        'obj2part': obj_info['obj2part'],
        'xyz_start': xyz_start_world,
        'xyz_end': xyz_end_world,
        'direction': direction_world,
    }


def get_aligned_fix_pose(active_obj, passive_obj, distance=0.01, N=1):
    """Aligned poses against a fixed target: like ``get_aligned_pose`` but the
    passive direction is taken from the raw annotation and the target position
    snaps onto the passive object's origin. Returns an (N, 4, 4) stack.
    """
    # BUGFIX: the original wrapped these calls in `except: print('error')`,
    # which swallowed the real exception and then crashed on undefined names.
    active_object = format_object(active_obj, type='active', distance=distance)
    passive_object = format_object(passive_obj, type='passive', distance=distance)

    active_obj_world = obj2world(active_object)
    current_obj_pose = active_obj_world['pose']
    if passive_object is None:
        return current_obj_pose[np.newaxis, ...]

    passive_obj_world = obj2world(passive_object)
    passive_obj_world['direction'] = passive_obj.direction  # keep the raw annotated axis

    rot = calculate_rotation_matrix(active_obj_world['direction'], passive_obj_world['direction'])
    trans = passive_obj_world['xyz_end'] - rot @ active_obj_world['xyz_start']
    transform_matrix = np.eye(4)
    transform_matrix[:3, :3] = rot
    transform_matrix[:3, 3] = trans
    target_obj_pose = transform_matrix @ current_obj_pose
    target_obj_pose[:3, 3] = passive_obj.obj_pose[:3, 3]  # snap onto the passive origin

    # Sample N candidates spinning about the passive approach axis.
    return np.stack([
        rotate_around_axis(target_obj_pose,
                           passive_obj_world['xyz_start'],
                           passive_obj_world['direction'],
                           i * 360 / N)
        for i in range(N)
    ])


def get_aligned_pose(active_obj, passive_obj, distance=0.01, N=1):
    """Compute N candidate world poses aligning the active object's approach
    axis with the passive object's, rotated about that axis. Returns an
    (N, 4, 4) stack; with no passive object, the current pose is returned
    unchanged (shape (1, 4, 4)).
    """
    # BUGFIX: same silent `except: print('error')` removed as above.
    active_object = format_object(active_obj, type='active', distance=distance)
    passive_object = format_object(passive_obj, type='passive', distance=distance)

    active_obj_world = obj2world(active_object)
    current_obj_pose = active_obj_world['pose']
    if passive_object is None:
        return current_obj_pose[np.newaxis, ...]

    passive_obj_world = obj2world(passive_object)

    rot = calculate_rotation_matrix(active_obj_world['direction'], passive_obj_world['direction'])
    trans = passive_obj_world['xyz_end'] - rot @ active_obj_world['xyz_start']
    transform_matrix = np.eye(4)
    transform_matrix[:3, :3] = rot
    transform_matrix[:3, 3] = trans
    target_obj_pose = transform_matrix @ current_obj_pose

    return np.stack([
        rotate_around_axis(target_obj_pose,
                           passive_obj_world['xyz_start'],
                           passive_obj_world['direction'],
                           i * 360 / N)
        for i in range(N)
    ])
+ + passive_obj_world = obj2world(passive_object) + + R = calculate_rotation_matrix(active_obj_world['direction'], passive_obj_world['direction']) + T = passive_obj_world['xyz_end'] - R @ active_obj_world['xyz_start'] + transform_matrix = np.eye(4) + transform_matrix[:3, :3] = R + transform_matrix[:3, 3] = T + target_obj_pose = transform_matrix @ current_obj_pose + + poses = [] + for angle in [i * 360 / N for i in range(N)]: + pose_rotated = rotate_around_axis( + target_obj_pose, + passive_obj_world['xyz_start'], + passive_obj_world['direction'], + angle) + poses.append(pose_rotated) + return np.stack(poses) + + +def load_task_solution(task_info): + # task_info = json.load(open('%s/task_info.json'%task_dir, 'rb')) + stages = task_info['stages'] + + objects = { + 'gripper': OmniObject('gripper') + } + + for obj_info in task_info['objects']: + obj_id = obj_info['object_id'] + if obj_id == 'fix_pose': + obj = OmniObject('fix_pose') + if 'position' not in obj_info or 'direction' not in obj_info: + print(f"Error: Missing position/direction in object {obj_id}") + continue + obj.set_pose(np.array(obj_info['position']), np.array([0.001, 0.001, 0.001])) + obj.elements = { + 'active': {}, + 'passive': { + 'place': { + 'default': [ + { + 'xyz': np.array([0, 0, 0]), + 'direction': np.array(obj_info['direction']) + } + ] + } + } + } + objects[obj_id] = obj + else: + obj_dir = obj_info["data_info_dir"] + obj = OmniObject.from_obj_dir(obj_dir, obj_info=obj_info) + objects[obj_id] = obj + + if hasattr(obj, 'part_ids'): + if hasattr(obj, 'part_joint_limits') and obj.part_joint_limits is not None: + obj_parts_joint_limits = obj.part_joint_limits + for part_id in obj.part_ids: + id = obj_id + '/%s' % part_id + objects[id] = copy.deepcopy(obj) + objects[id].name = id + objects[id].part_joint_limit = obj_parts_joint_limits[part_id] + if len(obj.part_ids): + del objects[obj_id] + return stages, objects + + +def parse_stage(stage, objects): + action = stage['action'] + if action in 
['reset']: + return action, 'gripper', 'gripper', None, None, None, None + active_obj_id = stage['active']['object_id'] + if 'part_id' in stage['active']: + active_obj_id += '/%s' % stage['active']['part_id'] + + passive_obj_id = stage['passive']['object_id'] + if 'part_id' in stage['passive']: + passive_obj_id += '/%s' % stage['passive']['part_id'] + + active_obj = objects[active_obj_id] + passive_obj = objects[passive_obj_id] + + single_obj = action in ['pull', 'rotate', 'slide', 'shave', 'brush', 'wipe'] + + def _load_element(obj, type): + if action in ['pick', 'hook']: + action_mapped = 'grasp' + else: + action_mapped = action + if action_mapped == 'grasp' and type == 'active': + return None, None + elif obj.name == 'gripper': + element = obj.elements[type][action_mapped] + return element, 'default' + primitive = stage[type]['primitive'] if stage[type]['primitive'] is not None else 'default' + if primitive != 'default' or (action_mapped == 'grasp' and type == 'passive'): + if action_mapped not in obj.elements[type]: + print('No %s element for %s' % (action_mapped, obj.name)) + return None, None + element = obj.elements[type][action_mapped][primitive] + else: + element = [] + for primitive in obj.elements[type][action_mapped]: + _element = obj.elements[type][action_mapped][primitive] + if isinstance(_element, list): + element += _element + else: + element.append(_element) + return element, primitive + + passive_element, passive_primitive = _load_element(passive_obj, 'passive') + if not single_obj: + active_element, active_primitive = _load_element(active_obj, 'active') + else: + active_element, active_primitive = passive_element, passive_primitive + return action, active_obj_id, passive_obj_id, active_element, passive_element, active_primitive, passive_primitive + + +def select_obj(objects, stages, robot): + gripper2obj = None + extra_params = stages[0].get('extra_params', {}) + arm = extra_params.get('arm', 'right') + current_gripper_pose = 
robot.get_ee_pose(id=arm) + + ''' 初筛抓取pose,得到 grasp_poses_canonical, grasp_poses ''' + grasp_stage_id = None + + if stages[0]['action'] in ['push', 'reset', 'click']: + gripper2obj = current_gripper_pose + + elif stages[0]['action'] in ['pick', 'grasp', 'hook']: + action = stages[0]['action'] + + ''' 筛掉无IK解的grasp pose ''' + grasp_stage_id = 0 + grasp_stage = parse_stage(stages[0], objects) + _, _, passive_obj_id, _, passive_element, _, _ = grasp_stage + grasp_obj_id = passive_obj_id + grasp_poses_canonical = passive_element['grasp_pose'].copy() + grasp_widths = passive_element['width'] + + grasp_poses_canonical[:, :3, :3] = grasp_poses_canonical[:, :3, :3] @ robot.robot_gripper_2_grasp_gripper[ + np.newaxis, ...] + + grasp_poses_canonical_flip = [] + for _i in range(grasp_poses_canonical.shape[0]): + grasp_poses_canonical_flip.append(rotate_along_axis(grasp_poses_canonical[_i], 180, 'z', use_local=True)) + grasp_poses_canonical_flip = np.stack(grasp_poses_canonical_flip) + grasp_poses_canonical = np.concatenate([grasp_poses_canonical, grasp_poses_canonical_flip], axis=0) + grasp_widths = np.concatenate([grasp_widths, grasp_widths], axis=0) + grasp_poses = objects[passive_obj_id].obj_pose[np.newaxis, ...] 
@ grasp_poses_canonical + + # filter with IK-checking + ik_success, _ = robot.solve_ik(grasp_poses, ee_type='gripper', arm=arm, type='Simple') + grasp_poses_canonical, grasp_poses = grasp_poses_canonical[ik_success], grasp_poses[ik_success] + grasp_widths = grasp_widths[ik_success] + print('%s, %s, Filtered grasp pose with isaac-sim IK: %d/%d' % (action, passive_obj_id, grasp_poses.shape[0], + ik_success.shape[0])) + + if len(grasp_poses) == 0: + print(action, 'No grasp_gripper_pose can pass Isaac IK') + return [] + + ''' 基于有IK解的grasp pose分数,选择最优的passive primitive element,同时选出最优的一个grasp pose''' + if grasp_stage_id is not None: + next_stage_id = grasp_stage_id + 1 + if next_stage_id < len(stages): + action, active_obj_id, passive_obj_id, active_elements, passive_elements, active_primitive, passive_primitive = parse_stage( + stages[next_stage_id], objects) + + single_obj = active_obj_id == passive_obj_id + + active_obj = objects[active_obj_id] + passive_obj = objects[passive_obj_id] + passive_element = passive_elements[np.random.choice(len(passive_elements))] + + if action == 'place': # TODO A2D暂时这样搞,Franka要取消 + # import ipdb; ipdb.set_trace() + obj_pose = active_obj.obj_pose + mesh = trimesh.load(active_obj.info['mesh_file'], force="mesh") + mesh.apply_scale(0.001) + mesh.apply_transform(obj_pose) + pts, _ = trimesh.sample.sample_surface(mesh, 200) # 表面采样 + xyz = np.array([np.mean(pts[:, 0]), np.mean(pts[:, 1]), np.percentile(pts[:, 2], 1)]) + + direction = np.array([0, 0, -1]) + xyz_canonical = (np.linalg.inv(obj_pose) @ np.array([*xyz, 1]))[:3] + direction_canonical = (np.linalg.inv(obj_pose) @ np.array([*direction, 0]))[:3] + active_elements = [{'xyz': xyz_canonical, 'direction': direction_canonical}] + + # import ipdb; ipdb.set_trace() + t0 = time.time() + element_ik_score = [] + grasp_pose_ik_score = [] + for active_element in active_elements: + # interaction between two rigid objects + obj_pose = active_obj.obj_pose + + N_align = 12 + if not single_obj: + 
active_obj.xyz, active_obj.direction = active_element['xyz'], active_element['direction'] + if passive_obj_id == 'fix_pose': + passive_obj.xyz, passive_obj.direction = passive_element['xyz'], passive_element['direction'] + target_obj_poses = get_aligned_fix_pose(active_obj, passive_obj, N=N_align) + else: + passive_obj.xyz, passive_obj.direction = passive_element['xyz'], passive_element['direction'] + target_obj_poses = get_aligned_pose(active_obj, passive_obj, N=N_align) + else: # 物体自身移动 + transform = np.eye(4) + transform[:3, 3] = active_element['xyz'] + target_obj_poses = (obj_pose @ transform)[np.newaxis, ...] + N_align = 1 + + N_obj_pose = target_obj_poses.shape[0] + N_grasp_pose = grasp_poses_canonical.shape[0] + target_gripper_poses = ( + target_obj_poses[:, np.newaxis, ...] @ grasp_poses_canonical[np.newaxis, ...]).reshape(-1, + 4, 4) + + ik_success, _ = robot.solve_ik(target_gripper_poses, ee_type='gripper', type='Simple', arm=arm) + element_ik_score.append(np.max(ik_success.reshape(N_obj_pose, N_grasp_pose).sum(axis=1))) + + grasp_pose_ik = ik_success.reshape(N_obj_pose, N_grasp_pose) + grasp_pose_ik_score.append(np.sum(grasp_pose_ik, axis=0)) + + # print("ik solve time: ", time.time() - t0) + best_element_id = np.argmax(element_ik_score) + best_active_element = active_elements[best_element_id] + + if not single_obj: + active_obj.elements['active'][action] = {active_primitive: best_active_element} + + grasp_ik_score = grasp_pose_ik_score[best_element_id] + + # import ipdb;ipdb.set_trace() + _mask = grasp_ik_score >= np.median(grasp_ik_score) / 2 + best_grasp_poses = grasp_poses[_mask] + + # downsample grasp pose + downsample_num = 100 + grasps_num = best_grasp_poses.shape[0] + selected_index = np.random.choice(grasps_num, size=min(downsample_num, grasps_num), replace=False) + best_grasp_poses = best_grasp_poses[selected_index] + # if best_grasp_poses.shape[0] > downsample_num: + # best_grasp_poses = best_grasp_poses[:downsample_num] + if 
best_grasp_poses.shape[0] > 1: + joint_names = robot.joint_names[arm] + + ik_success, ik_info = robot.solve_ik(best_grasp_poses, ee_type='gripper', type='Simple', arm=arm) + best_grasp_poses = best_grasp_poses[ik_success] + ik_joint_positions = ik_info["joint_positions"][ik_success] + ik_joint_names = ik_info["joint_names"][ik_success] + ik_jacobian_score = ik_info["jacobian_score"][ik_success] + + if len(best_grasp_poses) == 0: + print(action, 'No best_grasp_poses can pass curobo IK') + return [] + target_joint_positions = [] + for ik_joint_position, ik_joint_name in zip(ik_joint_positions, ik_joint_names): + temp_target_joint_positions = [] + for joint_name in joint_names: + temp_target_joint_positions.append(ik_joint_position[list(ik_joint_name).index(joint_name)]) + target_joint_positions.append(np.array(temp_target_joint_positions)) + target_joint_positions = np.array(target_joint_positions) + + # choose target pose based on the ik jacobian score and ik joint positions + cur_joint_states = robot.client.get_joint_positions().states + cur_joint_positions = [] + for key in cur_joint_states: + if key.name in joint_names: + cur_joint_positions.append(key.position) + cur_joint_positions = np.array(cur_joint_positions) + joint_pos_dist = np.linalg.norm(target_joint_positions - cur_joint_positions[np.newaxis, :], axis=1) + # joint_pos_dist = (joint_pos_dist - np.min(joint_pos_dist))/(np.max(joint_pos_dist) - np.min(joint_pos_dist)) + # ik_jacobian_score = (ik_jacobian_score - np.min(ik_jacobian_score))/(np.max(ik_jacobian_score) - np.min(ik_jacobian_score)) + cost = joint_pos_dist # - ik_jacobian_score + idx_sorted = np.argsort(cost) + best_grasp_pose = best_grasp_poses[idx_sorted][0] + + else: + best_grasp_pose = best_grasp_poses[0] + + best_grasp_pose_canonical = np.linalg.inv(objects[grasp_obj_id].obj_pose) @ best_grasp_pose + gripper2obj = best_grasp_pose_canonical + else: + gripper2obj = grasp_poses_canonical[0] + return gripper2obj + + +def 
split_grasp_stages(stages): + split_stages = [] + i = 0 + while i < len(stages): + if stages[i]['action'] in ['pick', 'grasp', 'hook']: + if (i + 1) < len(stages) and stages[i + 1]['action'] not in ['pick', 'grasp', 'hook']: + split_stages.append([stages[i], stages[i + 1]]) + i += 2 + else: + split_stages.append([stages[i]]) + i += 1 + else: + split_stages.append([stages[i]]) + i += 1 + return split_stages + + +def generate_action_stages(objects, all_stages, robot): + split_stages = split_grasp_stages(all_stages) + + action_stages = [] + for stages in split_stages: + gripper2obj = select_obj(objects, stages, robot) + if gripper2obj is None or len(gripper2obj) == 0: + print('No gripper2obj pose can pass IK') + gripper2obj = select_obj(objects, stages, robot) + return [] + for stage in stages: + extra_params = stage.get('extra_params', {}) + arm = extra_params.get('arm', 'right') + current_gripper_pose = robot.get_ee_pose(id=arm) + action, active_obj_id, passive_obj_id, active_elements, passive_elements, active_primitive, passive_primitive = parse_stage( + stage, objects) + active_obj = objects[active_obj_id] + passive_obj = objects[passive_obj_id] + + single_obj = active_obj_id == passive_obj_id + + substages = None + if action in ['reset']: + substages = True + elif action in ['pick', 'grasp', 'hook']: + substages = build_stage(action)(active_obj_id, passive_obj_id, active_elements, passive_elements, + gripper2obj, extra_params=stage.get('extra_params', None)) + elif action in ['slide', 'shave', 'brush', 'wipe', 'pull', 'rotate', 'pull', 'move', + 'reset']: # grasp + 物体自身运动 + passive_element = passive_elements[np.random.choice(len(passive_elements))] + substages = build_stage(action)( + active_obj_id=active_obj_id, + passive_obj_id=passive_obj_id, + passive_element=passive_element + ) + else: + passive_element = passive_elements[np.random.choice(len(passive_elements))] + # active_element = active_elements[np.random.choice(len(active_elements))] if 
isinstance(active_elements, list) else active_elements + if not isinstance(active_elements, list): + active_elements = [active_elements] + + for active_element in active_elements: + joint_names = robot.joint_names[arm] + + # interaction between two rigid objects + obj_pose = active_obj.obj_pose + anchor_pose = passive_obj.obj_pose + current_obj_pose_canonical = np.linalg.inv(anchor_pose) @ obj_pose + active_obj.xyz, active_obj.direction = active_element['xyz'], active_element['direction'] + passive_obj.xyz, passive_obj.direction = passive_element['xyz'], passive_element['direction'] + if active_obj.name == 'gripper': + gripper2obj = np.eye(4) + + if passive_obj_id == 'fix_pose': + # import ipdb;ipdb.set_trace() + target_obj_poses = get_aligned_fix_pose(active_obj, passive_obj, N=18) + else: + target_obj_poses = get_aligned_pose(active_obj, passive_obj, N=18) + target_gripper_poses = target_obj_poses @ gripper2obj[np.newaxis, ...] + # import ipdb;ipdb.set_trace() + + # downsample target_gripper_poses + downsample_num = 100 + if target_gripper_poses.shape[0] > downsample_num: + target_gripper_poses = target_gripper_poses[:downsample_num] + ik_success, ik_info = robot.solve_ik(target_gripper_poses, ee_type='gripper', type='AvoidObs', + arm=arm) + target_gripper_poses_pass_ik = target_gripper_poses[ik_success] + ik_joint_positions = ik_info["joint_positions"][ik_success] + ik_joint_names = ik_info["joint_names"][ik_success] + ik_jacobian_score = ik_info["jacobian_score"][ik_success] + + if len(target_gripper_poses_pass_ik) == 0: + print(action, ': No target_obj_pose can pass isaac curobo IK') + continue + + target_joint_positions = [] + for ik_joint_position, ik_joint_name in zip(ik_joint_positions, ik_joint_names): + temp_target_joint_positions = [] + for joint_name in joint_names: + temp_target_joint_positions.append(ik_joint_position[list(ik_joint_name).index(joint_name)]) + target_joint_positions.append(np.array(temp_target_joint_positions)) + 
target_joint_positions = np.array(target_joint_positions) + + # choose target pose based on the jacobian score and ik joint positions difference + cur_joint_states = robot.client.get_joint_positions().states + cur_joint_positions = [] + for key in cur_joint_states: + if key.name in joint_names: + cur_joint_positions.append(key.position) + cur_joint_positions = np.array(cur_joint_positions) + joint_pos_dist = np.linalg.norm(target_joint_positions - cur_joint_positions[np.newaxis, :], axis=1) + # joint_pos_dist = (joint_pos_dist - np.min(joint_pos_dist))/(np.max(joint_pos_dist) - np.min(joint_pos_dist)) + # ik_jacobian_score = (ik_jacobian_score - np.min(ik_jacobian_score))/(np.max(ik_jacobian_score) - np.min(ik_jacobian_score)) + cost = joint_pos_dist # - ik_jacobian_score + idx_sorted = np.argsort(cost) + + best_target_gripper_pose = target_gripper_poses_pass_ik[idx_sorted][0] + + best_target_obj_pose = best_target_gripper_pose @ np.linalg.inv(gripper2obj) + target_obj_pose_canonical = np.linalg.inv( + anchor_pose) @ best_target_obj_pose # TODO 暂时只取一个可行解,后面要结合grasp pose做整条trajectory的joint solve + + part2obj = np.eye(4) + part2obj[:3, 3] = active_obj.xyz + obj2part = np.linalg.inv(part2obj) + + substages = build_stage(action)( + active_obj_id=active_obj_id, + passive_obj_id=passive_obj_id, + target_pose=target_obj_pose_canonical, + current_pose=current_obj_pose_canonical, + obj2part=obj2part, + vector_direction=passive_element['direction'], + passive_element=passive_element, + extra_params=stage.get('extra_params', {}) + ) + break + + if substages is None: + print(action, ': No target_obj_pose can pass IK') + return [] + action_stages.append((action, substages)) + + return action_stages + + +def solve_target_gripper_pose(stage, objects): + active_obj_ID, passive_obj_ID, target_pose_canonical, gripper_action, transform_world, motion_type = stage + + anchor_pose = objects[passive_obj_ID].obj_pose + + if motion_type == 'Trajectory': + assert 
len(target_pose_canonical.shape) == 3, 'The target_pose should be a list of poses' + target_pose = anchor_pose[np.newaxis, ...] @ target_pose_canonical + target_pose = transform_world[np.newaxis, ...] @ target_pose + else: + target_pose = anchor_pose @ target_pose_canonical + target_pose = transform_world @ target_pose + assert 'gripper' in objects, 'The gripper should be the first one in the object list' + current_gripper_pose = objects['gripper'].obj_pose + + if active_obj_ID == 'gripper': + target_gripper_pose = target_pose + else: + current_obj_pose = objects[active_obj_ID].obj_pose + gripper2obj = np.linalg.inv(current_obj_pose) @ current_gripper_pose + if len(target_pose.shape) == 3: + gripper2obj = gripper2obj[np.newaxis, ...] + + target_obj_pose = target_pose + target_gripper_pose = target_obj_pose @ gripper2obj + + return target_gripper_pose + + + + + diff --git a/data_gen_dependencies/object.py b/data_gen_dependencies/object.py new file mode 100644 index 0000000..276b7d4 --- /dev/null +++ b/data_gen_dependencies/object.py @@ -0,0 +1,271 @@ +import json +import os +import pickle + +import numpy as np +from grasp_nms import nms_grasp +from data_gen_dependencies.transforms import transform_coordinates_3d + + +class OmniObject: + def __init__(self, + name='obj', + cam_info=None, + type='Active', + mask=None, + pose=np.eye(4), + size=np.array([0.001, 0.001, 0.001]) + ): + self.name = name + self.cam_info = cam_info + self.type = type + self.obj_pose = pose + self.obj_length = size + + self.xyz = np.array([0, 0, 0]) + self.direction = np.array([0, 0, 0.05]) + self.direction_proposals = None + self.elements = {} + if name == 'gripper': + self.elements['active'] = { + 'push': [ + { + "part": "finger edge", + "task": "forward push", + "xyz": np.array([0, 0, 0]), + "direction": np.array([0, 0, 0.08]), + }, + { + "part": "finger edge", + "task": "side push", + "xyz": np.array([0, 0, 0]), + "direction": np.array([1, 0, 0.3]), + }, + { + "part": "finger edge", + 
"task": "side push", + "xyz": np.array([0, 0, 0]), + "direction": np.array([-1, 0, 0.3]), + }, + ], + 'click': { + "part": "finger edge", + "task": "click", + "xyz": np.array([0, 0, 0]), + "direction": np.array([0, 0, 1]), + }, + 'touch': { + "part": "finger edge", + "task": "touch", + "xyz": np.array([0, 0, 0]), + "direction": np.array([0, 0, 1]), + }, + 'pull': { + "part": "finger edge", + "task": "pull", + "xyz": np.array([0, 0, 0]), + "direction": np.array([0, 0, 0.08]), + }, + 'rotate': { + "part": "finger edge", + "task": "rotate", + "xyz": np.array([0, 0, 0]), + "direction": np.array([0, 0, 0.08]), + } + } + + @classmethod + def from_obj_dir(cls, obj_dir, obj_info=None): + if "interaction" in obj_info: + obj_info = obj_info + interaction_info = obj_info["interaction"] + part_joint_limits_info = obj_info.get("part_joint_limits", None) + + else: + + obj_info_file = '%s/object_parameters.json' % obj_dir + interaction_label_file = '%s/interaction.json' % obj_dir + + assert os.path.exists(obj_info_file), 'object_parameters.json not found in %s' % obj_dir + assert os.path.exists(interaction_label_file), 'interaction.json not found in %s' % obj_dir + + obj_info = json.load(open(obj_info_file)) + interaction_data = json.load(open(interaction_label_file)) + interaction_info = interaction_data['interaction'] + part_joint_limits_info = interaction_data.get("part_joint_limits", None) + + obj = cls( + name=obj_info['object_id'], + size=obj_info['size'] + ) + + mesh_file = '%s/Aligned.obj' % obj_dir + if os.path.exists(mesh_file): + obj_info['mesh_file'] = mesh_file + + obj.part_joint_limits = part_joint_limits_info + + ''' Load interaction labels ''' + obj.part_ids = [] + for type in ['active', 'passive']: + if type not in interaction_info: + continue + for action in interaction_info[type]: + action_info = interaction_info[type][action] + # import ipdb;ipdb.set_trace() + if action == 'grasp' and type == 'passive': + for grasp_part in action_info: + grasp_files = 
action_info[grasp_part] + grasp_data = { + "grasp_pose": [], + "width": [] + } + if isinstance(grasp_files, str): + grasp_files = [grasp_files] + for grasp_file in grasp_files: + grasp_file = '%s/%s' % (obj_dir, grasp_file) + if not os.path.exists(grasp_file): + print('Grasp file %s not found' % grasp_file) + continue + _data = pickle.load(open(grasp_file, 'rb')) + _data['grasp_pose'] = np.array(_data['grasp_pose']) + _data['width'] = np.array(_data['width']) + + if _data['grasp_pose'].shape[0] == 0: + print('Empty grasp pose in %s' % grasp_file) + continue + grasp_data['grasp_pose'].append(_data['grasp_pose']) + grasp_data['width'].append(_data['width']) + if len(grasp_data['grasp_pose']) == 0: + print('Empty grasp pose in %s' % grasp_file) + continue + + grasp_data['grasp_pose'] = np.concatenate(grasp_data['grasp_pose']) + grasp_data['width'] = np.concatenate(grasp_data['width']) + + N_grasp = grasp_data['grasp_pose'].shape[0] + max_grasps = 100 + use_nms = True if N_grasp > 60 else False + if use_nms: + # import ipdb;ipdb.set_trace() + gripper_pose = grasp_data['grasp_pose'] + N = gripper_pose.shape[0] + width = grasp_data['width'][:N, np.newaxis] + + rotation = gripper_pose[:, :3, :3].reshape(N, -1) + translation = gripper_pose[:, :3, 3] + + height = 0.02 * np.ones_like(width) + depth = np.zeros_like(width) + score = np.ones_like(width) * 0.1 + obj_id = -1 * np.ones_like(width) + + grasp_group_array = np.concatenate( + [score, width, height, depth, rotation, translation, obj_id], axis=-1) + + if True: + nms_info = interaction_info.get('grasp_nms_threshold', + {'translation': 0.015, 'rotation': 25.0}) + translation_thresh = nms_info['translation'] + rotation_thresh = nms_info['rotation'] / 180.0 * np.pi + grasp_group_array = nms_grasp(grasp_group_array, translation_thresh, rotation_thresh) + + rotation = grasp_group_array[:, 4:4 + 9] + translation = grasp_group_array[:, 4 + 9:4 + 9 + 3] + width = grasp_group_array[:, 1] + grasp_data['grasp_pose'] = 
np.tile(np.eye(4), (grasp_group_array.shape[0], 1, 1)) + grasp_data['grasp_pose'][:, :3, :3] = rotation.reshape(-1, 3, 3) + grasp_data['grasp_pose'][:, :3, 3] = translation + grasp_data['width'] = width + + print(f"Grasp num of {obj.name} after NMS: {grasp_data['grasp_pose'].shape[0]}/{N_grasp}") + # downsample + if grasp_data['grasp_pose'].shape[0] > max_grasps: + grasp_num = grasp_data['grasp_pose'].shape[0] + index = np.random.choice(grasp_num, max_grasps, replace=False) + grasp_data['grasp_pose'] = grasp_data['grasp_pose'][index] + grasp_data['width'] = grasp_data['width'][index] + print( + f"Grasp num of {obj.name} after downsample: {grasp_data['grasp_pose'].shape[0]}/{N_grasp}") + else: + print(f"Grasp num of {obj.name}: {grasp_data['grasp_pose'].shape[0]}/{N_grasp}") + + action_info[grasp_part] = grasp_data + + interaction_info[type][action] = action_info + else: + for primitive in action_info: + for primitive_info in action_info[primitive]: + if 'part_id' in primitive_info: + obj.part_ids.append(primitive_info['part_id']) + obj.elements = interaction_info + obj.info = obj_info + # import ipdb;ipdb.set_trace() + obj.is_articulated = True if part_joint_limits_info is not None else False + return obj + + def set_element(self, element): + action = element['action'] + self.elements[action] = element + + def set_mask(self, mask, roi=None): + self.mask = mask + self.roi = roi + + def set_pose(self, pose, length): + self.obj_pose = pose + self.obj_length = length + + def set_part(self, xyz=None, direction=None, direction_proposals=None, relative=True): + if xyz is not None: + if not isinstance(xyz, np.ndarray): + xyz = np.array(xyz) + if relative: + xyz = xyz * self.obj_length / 2.0 + self.xyz = xyz + + if direction is not None: + if not isinstance(direction, np.ndarray): + direction = np.array(direction) + direction = direction / np.linalg.norm(direction) * 0.08 + self.direction = direction + + if direction_proposals is not None: + self.direction_proposals = 
direction_proposals + + def format_object(self, relative=True): + xyz, direction = self.xyz, self.direction + + obj_type = self.type.lower() + if obj_type == 'active': + xyz_start = xyz + xyz_end = xyz_start + direction + elif obj_type == 'passive' or obj_type == 'plane': + xyz_end = xyz + xyz_start = xyz_end - direction + + arrow_in_obj = np.array([xyz_start, xyz_end]).transpose(1, 0) + arrow_in_world = transform_coordinates_3d(arrow_in_obj, self.obj_pose).transpose(1, 0) + + xyz_start_world, xyz_end_world = arrow_in_world + + direction_world = xyz_end_world - xyz_start_world + direction_world = direction_world / np.linalg.norm(direction_world) + + part2obj = np.eye(4) + part2obj[:3, 3] = xyz_start + self.obj2part = np.linalg.inv(part2obj) + + object_world = { + 'pose': self.obj_pose, + 'length': self.obj_length, + 'xyz_start': xyz_start, + 'xyz_end': xyz_end, + 'xyz_start_world': xyz_start_world, + 'xyz_end_world': xyz_end_world, + 'direction': direction_world, + 'obj2part': self.obj2part + } + return object_world + + diff --git a/data_gen_dependencies/omni_robot.py b/data_gen_dependencies/omni_robot.py new file mode 100644 index 0000000..513e918 --- /dev/null +++ b/data_gen_dependencies/omni_robot.py @@ -0,0 +1,540 @@ +# 发送指令 +# 001. 拍照 +# 002. 左手/右手移动指定位姿 +# 003. 全身关节移动到指定角度 +# 004. 获取夹爪的位姿 +# 005. 获取任意物体的位姿 +# 006. 
添加物体 +import time + +import numpy as np +from scipy.spatial.transform import Rotation + +from data_gen_dependencies.client import Rpc_Client +from data_gen_dependencies.robot import Robot +from data_gen_dependencies.utils import ( + get_quaternion_wxyz_from_rotation_matrix, + get_rotation_matrix_from_quaternion, + matrix_to_euler_angles, + get_quaternion_from_euler, + get_quaternion_from_rotation_matrix +) + + +def normalize(vec, eps=1e-12): + norm = np.linalg.norm(vec, axis=-1) + norm = np.maximum(norm, eps) + out = (vec.T / norm).T + return out + + +def rot6d_to_mat(d6): + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = normalize(a1) + b2 = a2 - np.sum(b1 * a2, axis=-1, keepdims=True) * b1 + b2 = normalize(b2) + b3 = np.cross(b1, b2, axis=-1) + out = np.stack((b1, b2, b3), axis=-2) + return out + + +def pose10d_to_mat(d10): + pos = d10[..., :3] + d6 = d10[..., 3:] + rotmat = rot6d_to_mat(d6) + out = np.zeros(d10.shape[:-1] + (4, 4), dtype=d10.dtype) + out[..., :3, :3] = rotmat + out[..., :3, 3] = pos + out[..., 3, 3] = 1 + return out + + +class IsaacSimRpcRobot(Robot): + def __init__(self, robot_cfg="Franka_120s.json", scene_usd="Pick_Place_Franka_Yellow_Table.usd", + client_host="localhost:50051", position=[0, 0, 0], rotation=[1, 0, 0, 0], + stand_type="cylinder", stand_size_x=0.1, stand_size_y=0.1): + robot_urdf = robot_cfg.split(".")[0] + ".urdf" + self.client = Rpc_Client(client_host, robot_urdf) + self.client.InitRobot(robot_cfg=robot_cfg, robot_usd="", scene_usd=scene_usd, init_position=position, + init_rotation=rotation, + stand_type=stand_type, stand_size_x=stand_size_x, stand_size_y=stand_size_y) + self.cam_info = None + self.robot_gripper_2_grasp_gripper = np.array([ + [0.0, 0.0, 1.0], + [0.0, 1.0, 0.0], + [-1.0, 0.0, 0.0] + ]) + # TODO config file + if "A2D" in robot_cfg: + self.joint_names = { + "left": ['Joint1_l', 'Joint2_l', 'Joint3_l', 'Joint4_l', 'Joint5_l', 'Joint6_l', 'Joint7_l'], + "right": ['Joint1_r', 'Joint2_r', 'Joint3_r', 'Joint4_r', 'Joint5_r', 
'Joint6_r', 'Joint7_r'] + } + else: + self.joint_names = { + "left": ['panda_joint1', 'panda_joint2', 'panda_joint3', 'panda_joint4', 'panda_joint5', 'panda_joint6', + 'panda_joint7'], + "right": ['panda_joint1', 'panda_joint2', 'panda_joint3', 'panda_joint4', 'panda_joint5', + 'panda_joint6', 'panda_joint7'] + } + + self.init_position = position + self.init_rotation = rotation + self.setup() + + def reset(self): + self.target_object = None + self.client.reset() + time.sleep(0.5) + + # self.client.set_joint_positions([0, -0.569, 0, -2.809, 0, 3.04, 0.741, 1.0, 0.0, 1.0, -1.0, 0.0, 1.0, 0.0, 0.0], False) + + # self.close_gripper(id="right") + time.sleep(0.5) + pass + + def setup(self): + self.target_object = None + + # set robot init state + + def get_observation(self, data_keys): + """ + Example + data_keys = { + 'camera': { + 'camera_prim_list': [ + '/World/Raise_A2_W_T1/head_link/D455_Solid/TestCameraDepth' + ], + 'render_depth': True, + 'render_semantic': True + }, + 'pose': [ + '/World/Raise_A2_W_T1/head_link/D455_Solid/TestCameraDepth' + ], + 'joint_position': True, + 'gripper': True + } + """ + + observation = {} + observation = self.client.get_observation(data_keys) + + if "camera" in data_keys: + render_depth = data_keys["camera"]["render_depth"] if "render_depth" in data_keys["camera"] else False + render_semantic = ( + data_keys["camera"]["render_semantic"] if "render_semantic" in data_keys["camera"] else False + ) + + cam_data = {} + for cam_prim in data_keys["camera"]["camera_prim_list"]: + cam_data[cam_prim] = {} + response = self.client.capture_frame(camera_prim_path=cam_prim) + # cam_info + cam_info = { + "W": response.color_info.width, + "H": response.color_info.height, + "K": np.array( + [ + [response.color_info.fx, 0, response.color_info.ppx], + [0, response.color_info.fy, response.color_info.ppy], + [0, 0, 1], + ] + ), + "scale": 1, + } + cam_data[cam_prim]["cam_info"] = cam_info + # c2w + c2w = self.get_prim_world_pose(cam_prim, camera=True) + 
cam_data[cam_prim]['c2w'] = c2w + # rgb + rgb = np.frombuffer(response.color_image.data, dtype=np.uint8).reshape(cam_info["H"], cam_info["W"], 4)[ + :, :, :3 + ] + cam_data[cam_prim]["image"] = rgb + # depth + if render_depth: + depth = np.frombuffer(response.depth_image.data, dtype=np.float32).reshape( + cam_info["H"], cam_info["W"] + ) + cam_data[cam_prim]["depth"] = depth + + # semantic + if render_semantic: + response = self.client.capture_semantic_frame(camera_prim_path=cam_prim) + prim_id = {} + for label in response.label_dict: + name, id = label.label_name, label.label_id + prim = '/World/Objects/' + name + prim_id[prim] = id + mask = np.frombuffer(response.semantic_mask.data, dtype=np.int32).reshape(cam_info['H'], + cam_info['W']) + cam_data[cam_prim]['semantic'] = { + 'prim_id': prim_id, + 'mask': mask + } + + observation["camera"] = cam_data + + if "pose" in data_keys: + pose_data = {} + for obj_prim in data_keys["pose"]: + pose_data[obj_prim] = self.get_prim_world_pose(prim_path=obj_prim) + observation["pose"] = pose_data + + if "joint_position" in data_keys: + # TODO 是否要区分双臂? 
+ joint_position = self.client.get_joint_positions() + observation["joint_position"] = joint_position + + if "gripper" in data_keys and data_keys["gripper"] == True: + gripper_state = {} + gripper_state["left"] = self.client.get_gripper_state(is_right=False) + gripper_state["right"] = self.client.get_gripper_state(is_right=True) + observation["gripper"] = gripper_state + + return observation + + def open_gripper(self, id="left", width=0.1): + is_Right = True if id == "right" else False + if width is None: + width = 0.1 + self.client.set_gripper_state(gripper_command="open", is_right=is_Right, opened_width=width) + self.client.DetachObj() + + def close_gripper(self, id="left", force=50): + is_Right = True if id == "right" else False + self.client.set_gripper_state(gripper_command="close", is_right=is_Right, opened_width=0.00) + # if self.target_object is not None and is_Right: + # self.client.DetachObj() + # self.client.AttachObj(prim_paths=['/World/Objects/'+self.target_object]) + + def move_pose(self, target_pose, type, arm="right", **kwargs): + # import ipdb;ipdb.set_trace() + if type.lower() == 'trajectory': + content = { + "type": 'trajectory_4x4_pose', + "data": target_pose, + } + else: + if type == 'AvoidObs': + type = 'ObsAvoid' + elif type == 'Normal': + type = 'Simple' + + content = { + "type": "matrix", + "matrix": target_pose, + 'trajectory_type': type, + "arm": arm + } + return self.move(content) + + def set_gripper_action(self, action, arm="right"): + # self.gripper_server.set_gripper_action(action) + assert arm in ['left', 'right'] + time.sleep(0.3) + if action is None: + return + if action == 'open': + self.open_gripper(id=arm, width=0.1) + elif action == 'close': + self.close_gripper(id=arm) + + def move(self, content): + """ + type: str, 'matrix' or 'joint' + 'pose': np.array, 4x4 + 'joint': np.array, 1xN + """ + type = content["type"] + arm_name = content.get('arm', "right") + print(arm_name) + if type == "matrix": + if 
isinstance(content["matrix"], list): + content["matrix"] = np.array(content["matrix"]) + R, T = content["matrix"][:3, :3], content["matrix"][:3, 3] + quat_wxyz = get_quaternion_from_euler(matrix_to_euler_angles(R), order="ZYX") + ee_interpolation = False + target_position = T + if "trajectory_type" in content and content["trajectory_type"] == "Simple": + is_backend = True + target_rotation = quat_wxyz + elif "trajectory_type" in content and content["trajectory_type"] == "Straight": + is_backend = True + target_rotation = quat_wxyz + ee_interpolation = True + else: + is_backend = False + init_rotation_matrix = get_rotation_matrix_from_quaternion(self.init_rotation) + translation_matrix = np.zeros((4, 4)) + translation_matrix[:3, :3] = init_rotation_matrix + translation_matrix[:3, 3] = self.init_position + translation_matrix[3, 3] = 1 + target_matrix = np.linalg.inv(translation_matrix) @ content["matrix"] + target_rotation_matrix, target_position = target_matrix[:3, :3], target_matrix[:3, 3] + target_rotation = get_quaternion_from_euler(matrix_to_euler_angles(target_rotation_matrix), order="ZYX") + + print(f"target_position is{target_position}") + print(f"target_rotation is{target_rotation}") + # import ipdb;ipdb.set_trace() + # inverse_local_orientation = [self.init_rotation[0], -1*self.init_rotation[1], -1*self.init_rotation[2], -1*self.init_rotation[3]] + # local_position = quat_multiplication(inverse_local_orientation,T-self.init_position) + # target_position = local_position + state = ( + self.client.moveto( + target_position=target_position, + target_quaternion=target_rotation, + arm_name=arm_name, + is_backend=is_backend, + ee_interpolation=ee_interpolation + ).errmsg + == "True" + ) + print("move!", T, quat_wxyz, state) + + elif type == "quat": + + if "trajectory_type" in content and content["trajectory_type"] == "Simple": + is_backend = True + else: + is_backend = False + state = ( + self.client.moveto( + target_position=content["xyz"], + 
target_quaternion=content["quat"], + arm_name="left", + is_backend=is_backend, + ).errmsg + == "True" + ) + + + elif type == "joint": + state = self.client.set_joint_positions(content["position"]) + + elif type == "euler": + is_backend = True + + T_curr = self.get_ee_pose() + xyzrpy_input = content["xyzrpy"] + xyz_curr = T_curr[:3, 3] + rpy_curr = Rotation.from_matrix(T_curr[:3, :3]).as_euler("xyz") + + incr = content.get("incr", False) + if incr: + xyz_tgt = xyz_curr + np.array(xyzrpy_input[:3]) + rpy_tgt = rpy_curr + np.array(xyzrpy_input[3:]) + quat_tgt = get_quaternion_from_rotation_matrix( + Rotation.from_euler("xyz", rpy_tgt).as_matrix() + ) + else: + raise NotImplementedError + + state = ( + self.client.moveto( + target_position=xyz_tgt, + target_quaternion=quat_tgt, + arm_name="left", + is_backend=is_backend, + ).errmsg + == "True" + ) + + + elif type == 'move_pose_list': + action_list = content["data"] + for action in action_list: + T_curr = content["ee_transform"] # current ee2world + gripper_curr = content["gripper_position"] + + action_pose10d = action[:-1] + gripper_tgt = action[-1] + xyz_curr = T_curr[:3, 3] + rotation_curr = T_curr[:3, :3] + euler_curr = Rotation.from_matrix(rotation_curr).as_euler("xyz") + + action_repr = content.get("action_repr", "rela") + if action_repr == 'delta': + mat_tgt = pose10d_to_mat(action_pose10d) + xyz_delta = mat_tgt[:3, 3] + rotation_delta = mat_tgt[:3, :3] + xyz_tgt = xyz_curr + np.array(xyz_delta[:3]) + rotation_tgt = np.dot(rotation_curr, rotation_delta) + + euler_tgt = Rotation.from_matrix(rotation_tgt).as_euler("xyz") + quat_tgt = get_quaternion_from_rotation_matrix(rotation_tgt) + else: + raise RuntimeError() + + print(f"xyz_curr: {xyz_curr}, euler_curr: {euler_curr}, gripper_curr: {gripper_curr}") + print(f"xyz_tgt: {xyz_tgt}, euler_tgt: {euler_tgt}, gripper_tgt: {gripper_tgt}") + + start_time = time.time() + state = ( + self.client.moveto( + target_position=xyz_tgt, + target_quaternion=quat_tgt, + 
arm_name="right", + is_backend=True, + ).errmsg + == "True" + ) + print(f"move state {state}, move executed in {(time.time() - start_time):.3f} s") + + xyz_curr = self.get_ee_pose()[:3, 3] + euler_curr = Rotation.from_matrix(self.get_ee_pose()[:3, :3]).as_euler("xyz") + print(f"xyz_curr: {xyz_curr}, euler_curr: {euler_curr}") + print( + f"xyz dist: {(np.linalg.norm(xyz_curr - xyz_tgt)):.4f}, rpy dist: {(np.linalg.norm(euler_curr - euler_tgt)):.4f}") + + start_time = time.time() + if gripper_curr > 0.5 and gripper_tgt < 0.5: + self.close_gripper(id="right", force=50) + print(f"Close gripper executed in {(time.time() - start_time):.3f} s") + elif gripper_curr < 0.5 and gripper_tgt > 0.5: + self.open_gripper(id="right", width=0.1) + print(f"Open gripper executed in {(time.time() - start_time):.3f} s") + + + elif type.lower() == 'trajectory_4x4_pose': + waypoint_list = content["data"] + + traj = [] + for pose in waypoint_list: + xyz = pose[:3, 3] + quat_wxyz = get_quaternion_from_rotation_matrix(pose[:3, :3]) + pose = list(xyz), list(quat_wxyz) + traj.append(pose) + + print("Set Trajectory ! 
") + self.client.SetTrajectoryList(traj, is_block=True) + + # self.client.SetTrajectoryList(traj) # err: not block + state = True + + else: + raise NotImplementedError + + return state + + def get_prim_world_pose(self, prim_path, camera=False): + rotation_x_180 = np.array([[1.0, 0.0, 0.0, 0], [0.0, -1.0, 0.0, 0], [0.0, 0.0, -1.0, 0], [0, 0, 0, 1]]) + response = self.client.get_object_pose(prim_path=prim_path) + x, y, z = ( + response.object_pose.position.x, + response.object_pose.position.y, + response.object_pose.position.z, + ) + quat_wxyz = np.array( + [ + response.object_pose.rpy.rw, + response.object_pose.rpy.rx, + response.object_pose.rpy.ry, + response.object_pose.rpy.rz, + ] + ) + rot_mat = get_rotation_matrix_from_quaternion(quat_wxyz) + + pose = np.eye(4) + pose[:3, :3] = rot_mat + pose[:3, 3] = np.array([x, y, z]) + + if camera: + pose = pose @ rotation_x_180 + return pose + + def pose_from_cam_to_robot(self, pose2cam): + """transform pose from cam-coordinate to robot-coordinate""" + cam2world = self.get_prim_world_pose(prim_path=self.base_camera, camera=True) + pose2world = cam2world @ pose2cam + return pose2world + + def pose_from_world_to_cam(self, pose2world): + """transform pose from world-coordinate to cam-coordinate""" + cam2world = self.get_prim_world_pose(prim_path=self.base_camera, camera=True) + pose2cam = np.linalg.inv(cam2world) @ pose2world + return pose2cam + + def decode_gripper_pose(self, gripper_pose): + """Decode gripper-pose at cam-coordinate to end-pose at robot-coordinate""" + gripper_pose = self.pose_from_cam_to_robot(gripper_pose) + angle = 0 # np.pi / 2 + rot_z = np.array( + [ + [np.cos(angle), -np.sin(angle), 0], + [np.sin(angle), np.cos(angle), 0], + [0, 0, 1], + ] + ) + + flange2gripper = np.eye(4) + flange2gripper[:3, :3] = rot_z + flange2gripper[2, 3] = -0.01 + return gripper_pose @ flange2gripper + + def get_ee_pose(self, id="right", **kwargs): + state = self.client.GetEEPose(is_right=id == "right") + xyz = np.array( + [ + 
state.ee_pose.position.x, + state.ee_pose.position.y, + state.ee_pose.position.z, + ] + ) + quat = np.array( + [ + state.ee_pose.rpy.rw, + state.ee_pose.rpy.rx, + state.ee_pose.rpy.ry, + state.ee_pose.rpy.rz, + ] + ) + pose = np.eye(4) + pose[:3, 3] = xyz + pose[:3, :3] = get_rotation_matrix_from_quaternion(quat) + return pose + + def check_ik(self, pose, id="right", **kwargs): + xyz, quat = pose[:3, 3], get_quaternion_from_rotation_matrix(pose[:3, :3]) + state = self.client.GetIKStatus(target_position=xyz, target_rotation=quat, is_right=id == "right") + return state.isSuccess + + def solve_ik(self, pose, arm="right", type="Simple", **kwargs): + single_mode = len(pose.shape) == 2 + if single_mode: + pose = pose[np.newaxis, ...] + + xyz, quat = pose[:, :3, 3], get_quaternion_wxyz_from_rotation_matrix(pose[:, :3, :3]) + target_poses = [] + for i in range(len(pose)): + pose = { + "position": xyz[i], + "rotation": quat[i] + } + target_poses.append(pose) + + ObsAvoid = type == 'ObsAvoid' or type == 'AvoidObs' + result = self.client.GetIKStatus(target_poses=target_poses, is_right=arm == 'right', + ObsAvoid=ObsAvoid) # True: isaac, False: curobo + ik_result = [] + jacobian_score = [] + joint_positions = [] + joint_names = [] + for state in result: + ik_result.append(state["status"]) + jacobian_score.append(state["Jacobian"]) + joint_positions.append(state["joint_positions"]) + joint_names.append(state["joint_names"]) + # result = np.array(result) + if single_mode: + ik_result = ik_result[0] + jacobian_score = jacobian_score[0] + joint_positions = joint_positions[0] + joint_names = joint_names[0] + info = { + "jacobian_score": np.array(jacobian_score), + "joint_positions": np.array(joint_positions), + "joint_names": np.array(joint_names) + } + return np.array(ik_result), info diff --git a/data_gen_dependencies/omniagent.py b/data_gen_dependencies/omniagent.py new file mode 100644 index 0000000..9f950d5 --- /dev/null +++ b/data_gen_dependencies/omniagent.py @@ -0,0 +1,459 
@@ +# -*- coding: utf-8 -*- +import json +import os +import pickle +import random +import time + +import numpy as np +from pyboot.utils.log import Log + +from data_gen_dependencies.transforms import calculate_rotation_matrix +from data_gen_dependencies.manip_solver import load_task_solution, generate_action_stages, split_grasp_stages +from data_gen_dependencies.base_agent import BaseAgent +from data_gen_dependencies.omni_robot import IsaacSimRpcRobot + +class Agent(BaseAgent): + def __init__(self, robot: IsaacSimRpcRobot): + super().__init__(robot) + self.attached_obj_id = None + + def start_recording(self, task_name, camera_prim_list, fps, render_semantic=False): + self.robot.client.start_recording( + task_name=task_name, + fps=fps, + data_keys={ + "camera": { + "camera_prim_list": camera_prim_list, + "render_depth": False, + "render_semantic": render_semantic, + }, + "pose": ["/World/Raise_A2/gripper_center"], + "joint_position": True, + "gripper": True, + }, + ) + + def generate_layout(self, task_file): + self.task_file = task_file + with open(task_file, "r") as f: + task_info = json.load(f) + + # add mass for stable manipulation + for stage in task_info['stages']: + if stage['action'] in ['place', 'insert', 'pour']: + obj_id = stage['passive']['object_id'] + for i in range(len(task_info['objects'])): + if task_info['objects'][i]['object_id'] == obj_id: + task_info['objects'][i]['mass'] = 10 + break + + self.articulated_objs = [] + for object_info in task_info["objects"]: + if object_info['object_id'] == 'fix_pose': + continue + is_articulated = object_info.get('is_articulated', False) + if is_articulated: + self.articulated_objs.append(object_info['object_id']) + object_info['material'] = 'general' + self.add_object(object_info) + time.sleep(2) + + self.arm = task_info["arm"] + + ''' For A2D: Fix camera rotaton to look at target object ''' + task_related_objs = [] + for stage in task_info['stages']: + for type in ['active', 'passive']: + obj_id = 
stage[type]['object_id'] + if obj_id == 'gripper' or obj_id in task_related_objs: + continue + task_related_objs.append(obj_id) + + target_lookat_point = [] + for obj in task_info['objects']: + if obj['object_id'] not in task_related_objs: + continue + target_lookat_point.append(obj['position']) + target_lookat_point = np.mean(np.stack(target_lookat_point), axis=0) + self.robot.client.SetTargetPoint(target_lookat_point.tolist()) + + ''' Set material ''' + material_infos = [] + if "object_with_material" in task_info: + for key in task_info['object_with_material']: + material_infos += task_info['object_with_material'][key] + if len(material_infos): + self.robot.client.SetMaterial(material_infos) + time.sleep(0.3) + + ''' Set light ''' + light_infos = [] + if "lights" in task_info: + for key in task_info['lights']: + light_infos += task_info['lights'][key] + if len(light_infos): + self.robot.client.SetLight(light_infos) + time.sleep(0.3) + + ''' Set camera''' + if "cameras" in task_info: + for cam_id in task_info['cameras']: + cam_info = task_info['cameras'][cam_id] + self.robot.client.AddCamera( + cam_id, cam_info['position'], cam_info['quaternion'], + cam_info['width'], cam_info['height'], + cam_info['focal_length'], cam_info['horizontal_aperture'], cam_info['vertical_aperture'], + cam_info['is_local'] + ) + + def update_objects(self, objects, arm='right'): + # update gripper pose + objects['gripper'].obj_pose = self.robot.get_ee_pose(ee_type='gripper', id=arm) + + # update object pose + for obj_id in objects: + if obj_id == 'gripper': + continue + if obj_id == 'fix_pose': + if len(objects['fix_pose'].obj_pose) == 3: + position = objects['fix_pose'].obj_pose + rotation_matrix = calculate_rotation_matrix(objects['fix_pose'].direction, [0, 0, 1]) + objects['fix_pose'].obj_pose = np.eye(4) + objects['fix_pose'].obj_pose[:3, 3] = position.flatten() + objects['fix_pose'].obj_pose[:3, :3] = rotation_matrix + continue + # TODO(unify part_name and obj_name) + if '/' in 
obj_id: + obj_name = obj_id.split('/')[0] + part_name = obj_id.split('/')[1] + + object_joint_state = self.robot.client.get_object_joint('/World/Objects/%s' % obj_name) + for joint_name, joint_position, joint_velocity in zip(object_joint_state.joint_names, + object_joint_state.joint_positions, + object_joint_state.joint_velocities): + if joint_name[-1] == part_name[-1]: + objects[obj_id].joint_position = joint_position + objects[obj_id].joint_velocity = joint_velocity + + objects[obj_id].obj_pose = self.robot.get_prim_world_pose('/World/Objects/%s' % obj_id) + if hasattr(objects[obj_id], 'info') and 'simple_place' in objects[obj_id].info and objects[obj_id].info[ + 'simple_place']: + down_direction_world = (np.linalg.inv(objects[obj_id].obj_pose) @ np.array([0, 0, -1, 1]))[:3] + down_direction_world = down_direction_world / np.linalg.norm(down_direction_world) * 0.08 + objects[obj_id].elements['active']['place']['direction'] = down_direction_world + + return objects + + def check_task_file(self, task_file): + with open(task_file, "r") as f: + task_info = json.load(f) + + objs_dir = {} + objs_interaction = {} + for obj_info in task_info["objects"]: + obj_id = obj_info["object_id"] + if obj_id == 'fix_pose': + continue + objs_dir[obj_id] = obj_info["data_info_dir"] + if "interaction" in obj_info: + objs_interaction[obj_id] = obj_info["interaction"] + else: + objs_interaction[obj_id] = json.load(open(obj_info["data_info_dir"] + '/interaction.json'))[ + 'interaction'] + + for stage in task_info['stages']: + active_obj_id = stage['active']['object_id'] + passive_obj_id = stage['passive']['object_id'] + if active_obj_id != 'gripper': + if active_obj_id not in objs_dir: + Log.error('Active obj not in objs_dir: %s' % active_obj_id) + return False + if passive_obj_id != 'gripper' and passive_obj_id != 'fix_pose': + if passive_obj_id not in objs_dir: + Log.error('Passive obj not in objs_dir: %s' % passive_obj_id) + return False + data_root = 
os.path.dirname(os.path.dirname(__file__)) + "/assets" + if stage['action'] in ['grasp', 'pick']: + passive_obj_id = stage['passive']['object_id'] + obj_dir = objs_dir[passive_obj_id] + primitive = stage['passive']['primitive'] + if primitive is None: + file = 'grasp_pose/grasp_pose.pkl' + else: + file = objs_interaction[passive_obj_id]['passive']['grasp'][primitive] + if isinstance(file, list): + file = file[0] + grasp_file = os.path.join(data_root, obj_dir, file) + if not os.path.exists(grasp_file): + Log.error('-- Grasp file not exist: %s' % grasp_file) + return False + + _data = pickle.load(open(grasp_file, 'rb')) + if len(_data['grasp_pose']) == 0: + Log.error('-- Grasp file empty: %s' % grasp_file) + return False + return True + + def run(self, task_file, camera_list, use_recording, workspaces, fps=10, render_semantic=False): + if not self.check_task_file(task_file): + Log.error("Task file bad: %s" % task_file) + return False + print("Start Task:", task_file) + self.reset() + self.attached_obj_id = None + + # import ipdb;ipdb.set_trace() + + self.generate_layout(task_file) + # import ipdb;ipdb.set_trace() + self.robot.open_gripper(id='right') + self.robot.open_gripper(id='left') + + self.robot.reset_pose = { + 'right': self.robot.get_ee_pose(ee_type='gripper', id='right'), + 'left': self.robot.get_ee_pose(ee_type='gripper', id='left'), + } + print('Reset pose:', self.robot.reset_pose) + + task_info = json.load(open(task_file, 'rb')) + stages, objects = load_task_solution(task_info) + objects = self.update_objects(objects) + split_stages = split_grasp_stages(stages) + # import ipdb; ipdb.set_trace() + if use_recording: + self.start_recording(task_name="[%s]" % (os.path.basename(task_file).split(".")[0]), + camera_prim_list=camera_list, fps=fps, + render_semantic=render_semantic) # TODO 录制判断 + + stage_id = -1 + substages = None + for _stages in split_stages: + extra_params = _stages[0].get('extra_params', {}) + active_id, passive_id = 
_stages[0]['active']['object_id'], _stages[0]['passive']['object_id'] + arm = extra_params.get('arm', 'right') + action_stages = generate_action_stages(objects, _stages, self.robot) + if not len(action_stages): + success = False + print('No action stage generated.') + break + + # Execution + success = True + + for action, substages in action_stages: + stage_id += 1 + print('>>>> Stage [%d] <<<<' % (stage_id + 1)) + active_id, passive_id = _stages[stage_id]['active']['object_id'], _stages[stage_id]['passive'][ + 'object_id'] + if action in ['reset']: + init_pose = self.robot.reset_pose[arm] + curr_pose = self.robot.get_ee_pose(ee_type='gripper', id=arm) + print(arm) + print(curr_pose) + interp_pose = init_pose.copy() + interp_pose[:3, 3] = curr_pose[:3, 3] + (init_pose[:3, 3] - curr_pose[:3, 3]) * 0.25 + success = self.robot.move_pose(self.robot.reset_pose[arm], type='AvoidObs', arm=arm, block=True) + continue + + # if action in ['grasp', 'pick']: + # obj_id = substages.passive_obj_id + # if obj_id.split('/')[0] not in self.articulated_objs: + # self.robot.target_object = substages.passive_obj_id + + while len(substages): + # get next step actionddd + objects = self.update_objects(objects, arm=arm) + + target_gripper_pose, motion_type, gripper_action, arm = substages.get_action(objects) + arm = extra_params.get('arm', 'right') + self.robot.client.set_frame_state(action, substages.step_id, active_id, passive_id, + self.attached_obj_id is not None, arm=arm, + target_pose=target_gripper_pose) + + # execution action + if target_gripper_pose is not None: + self.robot.move_pose(target_gripper_pose, motion_type, arm=arm, block=True) + + set_gripper_open = gripper_action == 'open' + set_gripper_close = gripper_action == 'close' + self.robot.client.set_frame_state(action, substages.step_id, active_id, passive_id, + self.attached_obj_id is not None, set_gripper_open, + set_gripper_close, arm=arm, target_pose=target_gripper_pose) + self.robot.set_gripper_action(gripper_action, 
arm=arm) + if gripper_action == 'open': + time.sleep(1) + + self.robot.client.set_frame_state(action, substages.step_id, active_id, passive_id, + self.attached_obj_id is not None, arm=arm, + target_pose=target_gripper_pose) + + # check sub-stage completion + objects['gripper'].obj_pose = self.robot.get_ee_pose(ee_type='gripper', id=arm) + objects = self.update_objects(objects, arm=arm) + + success = substages.check_completion(objects) + self.robot.client.set_frame_state(action, substages.step_id, active_id, passive_id, + self.attached_obj_id is not None, arm=arm, + target_pose=target_gripper_pose) + if success == False: + # import ipdb;ipdb.set_trace() + self.attached_obj_id = None + print('Failed at sub-stage %d' % substages.step_id) + break + + # attach grasped object to gripper # TODO avoid articulated objects + if gripper_action == 'close': # TODO 确定是grasp才行!! + self.attached_obj_id = substages.passive_obj_id + elif gripper_action == 'open': + self.attached_obj_id = None + self.robot.client.set_frame_state(action, substages.step_id, active_id, passive_id, + self.attached_obj_id is not None, arm=arm, + target_pose=target_gripper_pose) + + # change object position + num_against = substages.extra_params.get('against', 0) + against_area = substages.extra_params.get('against_range', []) + against_type = substages.extra_params.get('against_type', None) + if num_against > 0 and gripper_action == 'open' and action == 'pick' and ( + against_type is not None): + parts = against_type.split('_') + need_move_objects = [passive_id] + if parts[0] == 'move' and parts[1] == 'with': + for workspace in objects: + if parts[2] in workspace: + need_move_objects.append(workspace) + ## 目前就集体向一个方向移动 + offset_y = random.uniform(0, 0.2) + poses = [] + for move_object in need_move_objects: + response = self.robot.client.get_object_pose(f'/World/Objects/{move_object}') + pos = [response.object_pose.position.x, + response.object_pose.position.y + offset_y, + response.object_pose.position.z] 
+ quat_wxyz = np.array( + [ + response.object_pose.rpy.rw, + response.object_pose.rpy.rx, + response.object_pose.rpy.ry, + response.object_pose.rpy.rz, + ] + ) + object_pose = {} + object_pose["prim_path"] = f'/World/Objects/{move_object}' + object_pose["position"] = pos + object_pose["rotation"] = quat_wxyz + poses.append(object_pose) + print(poses) + self.robot.client.SetObjectPose(poses, []) + self.robot.client.DetachObj() + elif (num_against > 0 and gripper_action == 'open' and action == 'pick') or \ + (num_against > 0 and action == 'place' and gripper_action == 'close'): + # import ipdb;ipdb.set_trace() + x_min, x_max = 999.0, -999.0 + y_min, y_max = 999.0, -999.0 + if against_area: + selected_against_area = random.choice(against_area) + if selected_against_area in workspaces: + position = workspaces[selected_against_area]['position'] + size = workspaces[selected_against_area]['size'] + x_min, x_max = position[0] - size[0] / 2, position[0] + size[0] / 2 + y_min, y_max = position[1] - size[1] / 2, position[1] + size[1] / 2 + x_size, y_size, z_size = objects[passive_id].info['size'] + up_axis = objects[passive_id].info['upAxis'] + axis_mapping = { + 'x': (y_size / 2000.0, z_size / 2000.0), + 'y': (x_size / 2000.0, z_size / 2000.0), + 'z': (x_size / 2000.0, y_size / 2000.0) + } + dimensions = axis_mapping[up_axis[0]] + distance = np.linalg.norm(dimensions) + x_min += distance * 1.5 + x_max -= distance * 1.5 + y_min += distance * 1.5 + y_max -= distance * 1.5 + else: + print("against_range not set") + continue + response = self.robot.client.get_object_pose(f'/World/Objects/{passive_id}') + pos = [response.object_pose.position.x, + response.object_pose.position.y, + response.object_pose.position.z + 0.02] + quat_wxyz = np.array( + [ + response.object_pose.rpy.rw, + response.object_pose.rpy.rx, + response.object_pose.rpy.ry, + response.object_pose.rpy.rz, + ] + ) + # import ipdb;ipdb.set_trace() + # if position is close to the gripper, then random position again + 
while True: + pos[0] = random.uniform(x_min, x_max) + pos[1] = random.uniform(y_min, y_max) + distance = np.linalg.norm( + [pos[0] - target_gripper_pose[0][3], pos[1] - target_gripper_pose[1][3]]) + if distance >= 0.2: + break + poses = [] + object_pose = {} + object_pose["prim_path"] = f'/World/Objects/{passive_id}' + object_pose["position"] = pos + object_pose["rotation"] = quat_wxyz + poses.append(object_pose) + print(poses) + self.robot.client.SetObjectPose(poses, []) + self.robot.client.DetachObj() + # if num_against > 0 and action == 'place' and gripper_action == 'close': + # offset = random.uniform(-0.1, 0.1) + # response = self.robot.client.get_object_pose('/World/Objects/%s'%passive_id) + # pos = [response.object_pose.position.x, + # response.object_pose.position.y - offset, + # response.object_pose.position.z] + # quat_wxyz = np.array( + # [ + # response.object_pose.rpy.rw, + # response.object_pose.rpy.rx, + # response.object_pose.rpy.ry, + # response.object_pose.rpy.rz, + # ] + # ) + # # import ipdb;ipdb.set_trace() + # poses = [] + # object_pose ={} + # object_pose["prim_path"] = '/World/Objects/' + passive_id + # object_pose["position"] =pos + # object_pose["rotation"] = quat_wxyz + # poses.append(object_pose) + # self.robot.client.SetObjectPose(poses, []) + # index += 1 + # self.robot.client.DetachObj() + + if success == False: + self.attached_obj_id = None + break + if self.attached_obj_id is not None: + if self.attached_obj_id.split('/')[0] not in self.articulated_objs: + self.robot.client.DetachObj() + self.robot.client.AttachObj(prim_paths=['/World/Objects/' + self.attached_obj_id], + is_right=arm == 'right') + if success == False: + break + time.sleep(0.5) + if self.attached_obj_id is None: + self.robot.client.DetachObj() + self.robot.client.stop_recording() + # try: + # step_id = substages.step_id if substages is not None and len(substages) else -1 + # except: + # step_id = -1 + step_id = -1 + fail_stage_step = [stage_id, step_id] if success == 
False else [-1, -1] + + task_info_saved = task_info.copy() + self.robot.client.SendTaskStatus(success, fail_stage_step) + if success: + print(">>>>>>>>>>>>>>>> Success!!!!!!!!!!!!!!!!") + + return True diff --git a/data_gen_dependencies/robot.py b/data_gen_dependencies/robot.py new file mode 100644 index 0000000..278fef9 --- /dev/null +++ b/data_gen_dependencies/robot.py @@ -0,0 +1,290 @@ +# 发送指令 +# 001. 拍照 +# 002. 左手/右手移动指定位姿 +# 003. 全身关节移动到指定角度 +# 004. 获取夹爪的位姿 +# 005. 获取任意物体的位姿 +# 006. 添加物体 +import numpy as np +import time + +from data_gen_dependencies import Robot +from data_gen_dependencies.client import Rpc_Client +from data_gen_dependencies.utils import ( + get_rotation_matrix_from_quaternion, + matrix_to_euler_angles, + get_quaternion_from_euler, + get_quaternion_from_rotation_matrix, +) + + +class IsaacSimRpcRobot(Robot): + def __init__(self, robot_cfg, robot_usd, scene_usd, client_host="localhost:50051"): + self.client = Rpc_Client(client_host) + self.client.InitRobot(robot_cfg, robot_usd, scene_usd) + self.cam_info = None + self.setup() + + def reset(self): + self.target_object = None + self.client.reset() + time.sleep(1) + self.open_gripper(id='right') + pass + + def setup(self): + self.target_object = None + + # set robot init state + + def get_observation(self, data_keys): + """ + Example + data_keys = { + 'camera': { + 'camera_prim_list': [ + '/World/Raise_A2_W_T1/head_link/D455_Solid/TestCameraDepth' + ], + 'render_depth': True, + 'render_semantic': True + }, + 'pose': [ + '/World/Raise_A2_W_T1/head_link/D455_Solid/TestCameraDepth' + ], + 'joint_position': True, + 'gripper': True + } + """ + + observation = {} + observation = self.client.get_observation(data_keys) + + if "camera" in data_keys: + render_depth = data_keys["camera"]["render_depth"] if "render_depth" in data_keys["camera"] else False + render_semantic = ( + data_keys["camera"]["render_semantic"] if "render_semantic" in data_keys["camera"] else False + ) + + cam_data = {} + for cam_prim 
in data_keys["camera"]["camera_prim_list"]: + cam_data[cam_prim] = {} + response = self.client.capture_frame(camera_prim_path=cam_prim) + # cam_info + cam_info = { + "W": response.color_info.width, + "H": response.color_info.height, + "K": np.array( + [ + [response.color_info.fx, 0, response.color_info.ppx], + [0, response.color_info.fy, response.color_info.ppy], + [0, 0, 1], + ] + ), + "scale": 1, + } + cam_data[cam_prim]["cam_info"] = cam_info + # c2w + c2w = self.get_prim_world_pose(cam_prim, camera=True) + cam_data[cam_prim]['c2w'] = c2w + # rgb + rgb = np.frombuffer(response.color_image.data, dtype=np.uint8).reshape(cam_info["H"], cam_info["W"], 4)[ + :, :, :3 + ] + cam_data[cam_prim]["image"] = rgb + # depth + if render_depth: + depth = np.frombuffer(response.depth_image.data, dtype=np.float32).reshape( + cam_info["H"], cam_info["W"] + ) + cam_data[cam_prim]["depth"] = depth + + # semantic + if render_semantic: + response = self.client.capture_semantic_frame(camera_prim_path=cam_prim) + prim_id = {} + for label in response.label_dict: + name, id = label.label_name, label.label_id + prim = '/World/Objects/' + name + prim_id[prim] = id + mask = np.frombuffer(response.semantic_mask.data, dtype=np.int32).reshape(cam_info['H'], + cam_info['W']) + cam_data[cam_prim]['semantic'] = { + 'prim_id': prim_id, + 'mask': mask + } + + observation["camera"] = cam_data + + if "pose" in data_keys: + pose_data = {} + for obj_prim in data_keys["pose"]: + pose_data[obj_prim] = self.get_prim_world_pose(prim_path=obj_prim) + observation["pose"] = pose_data + + if "joint_position" in data_keys: + # TODO 是否要区分双臂? 
+ joint_position = self.client.get_joint_positions() + observation["joint_position"] = joint_position + + if "gripper" in data_keys: + gripper_state = {} + gripper_state["left"] = self.client.get_gripper_state(is_right=False) + gripper_state["right"] = self.client.get_gripper_state(is_right=True) + observation["gripper"] = gripper_state + + return observation + + def open_gripper(self, id="left", width=0.1): + is_Right = True if id == "right" else False + if width is None: + width = 0.1 + self.client.set_gripper_state(gripper_command="open", is_right=is_Right, opened_width=width) + + self.client.DetachObj() + + def close_gripper(self, id="left", force=50): + is_Right = True if id == "right" else False + self.client.set_gripper_state(gripper_command="close", is_right=is_Right, opened_width=0.00) + + # if self.target_object is not None: + # # self.client.DetachObj() + # self.client.AttachObj(prim_paths=['/World/Objects/'+self.target_object]) + + def move(self, content): + """ + type: str, 'matrix' or 'joint' + 'pose': np.array, 4x4 + 'joint': np.array, 1xN + """ + type = content["type"] + if type == "matrix": + if isinstance(content["matrix"], list): + content["matrix"] = np.array(content["matrix"]) + R, T = content["matrix"][:3, :3], content["matrix"][:3, 3] + quat_wxyz = get_quaternion_from_euler(matrix_to_euler_angles(R), order="ZYX") + + if "trajectory_type" in content and content["trajectory_type"] == "Simple": + is_backend = True + else: + is_backend = False + state = ( + self.client.moveto( + target_position=T, + target_quaternion=quat_wxyz, + arm_name="left", + is_backend=is_backend, + ).errmsg + == "True" + ) + # if is_backend == True: + # time.sleep(1) + elif type == "quat": + + if "trajectory_type" in content and content["trajectory_type"] == "Simple": + is_backend = True + else: + is_backend = False + state = ( + self.client.moveto( + target_position=content["xyz"], + target_quaternion=content["quat"], + arm_name="left", + is_backend=is_backend, + 
).errmsg + == "True" + ) + + # if is_backend == True: + # time.sleep(4) + elif type == "joint": + state = self.client.set_joint_positions(content["position"]) + else: + raise NotImplementedError + + return state + + def get_prim_world_pose(self, prim_path, camera=False): + rotation_x_180 = np.array([[1.0, 0.0, 0.0, 0], [0.0, -1.0, 0.0, 0], [0.0, 0.0, -1.0, 0], [0, 0, 0, 1]]) + response = self.client.get_object_pose(prim_path=prim_path) + x, y, z = ( + response.object_pose.position.x, + response.object_pose.position.y, + response.object_pose.position.z, + ) + quat_wxyz = np.array( + [ + response.object_pose.rpy.rw, + response.object_pose.rpy.rx, + response.object_pose.rpy.ry, + response.object_pose.rpy.rz, + ] + ) + rot_mat = get_rotation_matrix_from_quaternion(quat_wxyz) + + pose = np.eye(4) + pose[:3, :3] = rot_mat + pose[:3, 3] = np.array([x, y, z]) + + if camera: + pose = pose @ rotation_x_180 + return pose + + def pose_from_cam_to_robot(self, pose2cam): + """transform pose from cam-coordinate to robot-coordinate""" + cam2world = self.get_prim_world_pose(prim_path=self.base_camera, camera=True) + pose2world = cam2world @ pose2cam + return pose2world + + def pose_from_world_to_cam(self, pose2world): + """transform pose from world-coordinate to cam-coordinate""" + cam2world = self.get_prim_world_pose(prim_path=self.base_camera, camera=True) + pose2cam = np.linalg.inv(cam2world) @ pose2world + return pose2cam + + def decode_gripper_pose(self, gripper_pose): + """Decode gripper-pose at cam-coordinate to end-pose at robot-coordinate""" + gripper_pose = self.pose_from_cam_to_robot(gripper_pose) + angle = 0 # np.pi / 2 + rot_z = np.array( + [ + [np.cos(angle), -np.sin(angle), 0], + [np.sin(angle), np.cos(angle), 0], + [0, 0, 1], + ] + ) + + flange2gripper = np.eye(4) + flange2gripper[:3, :3] = rot_z + flange2gripper[2, 3] = -0.015 + return gripper_pose @ flange2gripper + + def get_ee_pose(self, id="right", return_matrix=True, **kwargs): + state = 
self.client.GetEEPose(is_right=id == "right") + xyz = np.array( + [ + state.ee_pose.position.x, + state.ee_pose.position.y, + state.ee_pose.position.z, + ] + ) + quat = np.array( + [ + state.ee_pose.rpy.rw, + state.ee_pose.rpy.rx, + state.ee_pose.rpy.ry, + state.ee_pose.rpy.rz, + ] + ) + pose = np.eye(4) + pose[:3, 3] = xyz + pose[:3, :3] = get_rotation_matrix_from_quaternion(quat) + print(xyz, quat) + if return_matrix: + return pose + else: + return xyz, quat + + def check_ik(self, pose, id="right"): + xyz, quat = pose[:3, 3], get_quaternion_from_rotation_matrix(pose[:3, :3]) + state = self.client.GetIKStatus(target_position=xyz, target_rotation=quat, is_right=id == "right") + return state.isSuccess diff --git a/data_gen_dependencies/transforms.py b/data_gen_dependencies/transforms.py new file mode 100644 index 0000000..891a887 --- /dev/null +++ b/data_gen_dependencies/transforms.py @@ -0,0 +1,149 @@ +import numpy as np +from numpy import ndarray +from scipy.spatial.transform import Rotation as R + + +def calculate_rotation_matrix2(v1, v2): + """ Calculate the rotation matrix that aligns v1 to v2 """ + v1 = v1 / np.linalg.norm(v1) + v2 = v2 / np.linalg.norm(v2) + rot_vec = np.cross(v1, v2) + rot_angle = np.arccos(np.dot(v1, v2)) + # 检查是否共线相反 + if np.allclose(v1, -v2): + # 选择一个垂直于v1和v2的向量作为旋转轴 + # 这里选择z轴方向的向量(0, 0, 1) + # 如果v1和v2是z轴方向的,可以选择x轴或y轴 + rot_vec = np.cross(v1, [0, 0, 1]) + if np.linalg.norm(rot_vec) < 1e-10: # 处理特殊情况 + rot_vec = np.cross(v1, [1, 0, 0]) # 选择x轴作为旋转轴 + rot_vec = rot_vec / np.linalg.norm(rot_vec) + rot_angle = np.pi # 180度旋转 + + else: + rot_vec = np.cross(v1, v2) + rot_angle = np.arccos(np.dot(v1, v2)) + # Calculate the rotation matrix + rotation_matrix = R.from_rotvec(rot_vec * rot_angle).as_matrix() + return rotation_matrix + + +def get_cross_prod_mat(pVec_Arr): + # pVec_Arr shape (3) + qCross_prod_mat = np.array([ + [0, -pVec_Arr[2], pVec_Arr[1]], + [pVec_Arr[2], 0, -pVec_Arr[0]], + [-pVec_Arr[1], pVec_Arr[0], 0], + ]) + return 
qCross_prod_mat + + +def calculate_rotation_matrix(v1, v2): + scale = np.linalg.norm(v2) + v2 = v2 / scale + # must ensure pVec_Arr is also a unit vec. + z_mat = get_cross_prod_mat(v1) + + z_c_vec = np.matmul(z_mat, v2) + z_c_vec_mat = get_cross_prod_mat(z_c_vec) + + if np.dot(v1, v2) == -1: + qTrans_Mat = -np.eye(3, 3) + elif np.dot(v1, v2) == 1: + qTrans_Mat = np.eye(3, 3) + else: + qTrans_Mat = np.eye(3, 3) + z_c_vec_mat + np.matmul(z_c_vec_mat, + z_c_vec_mat) / (1 + np.dot(v1, v2)) + + qTrans_Mat *= scale + return qTrans_Mat + + +def transform_coordinates_3d(coordinates: ndarray, sRT: ndarray): + """Apply 3D affine transformation to pointcloud. + + :param coordinates: ndarray of shape [3, N] + :param sRT: ndarray of shape [4, 4] + + :returns: new pointcloud of shape [3, N] + """ + assert coordinates.shape[0] == 3 + coordinates = np.vstack( + [coordinates, np.ones((1, coordinates.shape[1]), dtype=np.float32)] + ) + new_coordinates = sRT @ coordinates + new_coordinates = new_coordinates[:3, :] / new_coordinates[3, :] + return new_coordinates + + +def calculate_2d_projections(coordinates_3d: ndarray, intrinsics_K: ndarray): + """ + :param coordinates_3d: [3, N] + :param intrinsics_K: K matrix [3, 3] (the return value of :func:`.data_types.CameraIntrinsicsBase.to_matrix`) + + :returns: projected_coordinates: [N, 2] + """ + projected_coordinates = intrinsics_K @ coordinates_3d + projected_coordinates = projected_coordinates[:2, :] / projected_coordinates[2, :] + projected_coordinates = projected_coordinates.transpose() + projected_coordinates = np.array(projected_coordinates, dtype=np.int32) + + return projected_coordinates + + +def rotate_around_axis(pose, P1, vector, angle_delta): + """ + Rotate an object's pose around an axis in the world frame. + + Parameters: + pose : np.ndarray + 4x4 object pose matrix + P1 : np.ndarray + a point on the rotation axis, shape (3,) + vector : np.ndarray + direction vector of the rotation axis, shape (3,) + angle_delta : float + rotation angle in degrees (converted to radians below) + + Returns: + np.ndarray + the rotated 4x4 pose matrix + """ + + # rotation-axis direction vector + v = vector + # normalize the direction vector + u = v / np.linalg.norm(v) + theta = 
np.radians(angle_delta) + + # 计算Rodrigues' rotation formula的矩阵K + ux, uy, uz = u + K = np.array([ + [0, -uz, uy], + [uz, 0, -ux], + [-uy, ux, 0] + ]) + + # 计算旋转矩阵R + I = np.eye(3) + R = I + np.sin(theta) * K + (1 - np.cos(theta)) * np.dot(K, K) + + # 将R转换为4x4形式 + R_4x4 = np.eye(4) + R_4x4[:3, :3] = R + + # 构建平移矩阵T1 + T1 = np.eye(4) + T1[:3, 3] = -P1 + + # 构建平移矩阵T2 + T2 = np.eye(4) + T2[:3, 3] = P1 + + # 组合变换矩阵 + M = T2 @ R_4x4 @ T1 + + # 应用变换到原始姿态矩阵 + new_pose = M @ pose + + return new_pose \ No newline at end of file diff --git a/data_gen_dependencies/utils.py b/data_gen_dependencies/utils.py new file mode 100644 index 0000000..84ef97a --- /dev/null +++ b/data_gen_dependencies/utils.py @@ -0,0 +1,558 @@ +def quaternion_rotate(quaternion, axis, angle): + """ + Rotate a quaternion around a specified axis by a given angle. + + Parameters: + quaternion (numpy array): The input quaternion [w, x, y, z]. + axis (str): The axis to rotate around ('x', 'y', or 'z'). + angle (float): The rotation angle in degrees. + + Returns: + numpy array: The rotated quaternion. + """ + # Convert angle from degrees to radians + angle_rad = np.radians(angle) + + # Calculate the rotation quaternion based on the specified axis + cos_half_angle = np.cos(angle_rad / 2) + sin_half_angle = np.sin(angle_rad / 2) + + if axis == 'x': + q_axis = np.array([cos_half_angle, sin_half_angle, 0, 0]) + elif axis == 'y': + q_axis = np.array([cos_half_angle, 0, sin_half_angle, 0]) + elif axis == 'z': + q_axis = np.array([cos_half_angle, 0, 0, sin_half_angle]) + else: + raise ValueError("Unsupported axis. 
Use 'x', 'y', or 'z'.") + + # Extract components of the input quaternion + w1, x1, y1, z1 = quaternion + w2, x2, y2, z2 = q_axis + + # Quaternion multiplication + w = w1 * w2 - x1 * x2 - y1 * y2 - z1 * z2 + x = w1 * x2 + x1 * w2 + y1 * z2 - z1 * y2 + y = w1 * y2 - x1 * z2 + y1 * w2 + z1 * x2 + z = w1 * z2 + x1 * y2 - y1 * x2 + z1 * w2 + + return np.array([w, x, y, z]) + + +def axis_to_quaternion(axis, target_axis='y'): + """ + Calculate the quaternion that rotates a given axis to the target axis. + + Parameters: + axis (str): The axis in the object's local coordinate system ('x', 'y', or 'z'). + target_axis (str): The target axis in the world coordinate system ('x', 'y', or 'z'). + + Returns: + numpy array: The quaternion representing the rotation. + """ + # Define unit vectors for each axis + unit_vectors = { + 'x': np.array([1, 0, 0]), + 'y': np.array([0, 1, 0]), + 'z': np.array([0, 0, 1]) + } + + if axis not in unit_vectors or target_axis not in unit_vectors: + raise ValueError("Unsupported axis. 
Use 'x', 'y', or 'z'.") + + v1 = unit_vectors[axis] + v2 = unit_vectors[target_axis] + + # Calculate the cross product and dot product + cross_prod = np.cross(v1, v2) + dot_prod = np.dot(v1, v2) + + # Calculate the quaternion + w = np.sqrt((np.linalg.norm(v1) ** 2) * (np.linalg.norm(v2) ** 2)) + dot_prod + x, y, z = cross_prod + + # Normalize the quaternion + q = np.array([w, x, y, z]) + q = q / np.linalg.norm(q) + + return q + + +def is_y_axis_up(pose_matrix): + """ + 判断物体在世界坐标系中的 y 轴是否朝上。 + + 参数: + pose_matrix (numpy.ndarray): 4x4 齐次变换矩阵。 + + 返回: + bool: True 如果 y 轴朝上,False 如果 y 轴朝下。 + """ + # 提取旋转矩阵的第二列 + y_axis_vector = pose_matrix[:3, 1] + + # 世界坐标系的 y 轴 + world_y_axis = np.array([0, 1, 0]) + + # 计算点积 + dot_product = np.dot(y_axis_vector, world_y_axis) + + # 返回 True 如果朝上,否则返回 False + return dot_product > 0 + + +def is_local_axis_facing_world_axis(pose_matrix, local_axis='y', world_axis='z'): + # 定义局部坐标系的轴索引 + local_axis_index = {'x': 0, 'y': 1, 'z': 2} + + # 定义世界坐标系的轴向量 + world_axes = { + 'x': np.array([1, 0, 0]), + 'y': np.array([0, 1, 0]), + 'z': np.array([0, 0, 1]) + } + + # 提取局部坐标系的指定轴 + local_axis_vector = pose_matrix[:3, local_axis_index[local_axis]] + + # 获取世界坐标系的指定轴向量 + world_axis_vector = world_axes[world_axis] + + # 计算点积 + dot_product = np.dot(local_axis_vector, world_axis_vector) + + # 返回 True 如果朝向指定世界轴,否则返回 False + return dot_product > 0 + + +def rotate_180_along_axis(target_affine, rot_axis='z'): + ''' + gripper是对称结构,绕Z轴旋转180度前后是等效的。选择离当前pose更近的target pose以避免不必要的旋转 + ''' + if rot_axis == 'z': + R_180 = np.array([[-1, 0, 0], + [0, -1, 0], + [0, 0, 1]]) + elif rot_axis == 'y': + R_180 = np.array([[-1, 0, 0], + [0, 1, 0], + [0, 0, -1]]) + elif rot_axis == 'x': + R_180 = np.array([[1, 0, 0], + [0, -1, 0], + [0, 0, -1]]) + else: + assert False, "Invalid rotation axis. Please choose from 'x', 'y', 'z'." 
+ + # 提取旋转部分(3x3矩阵) + target_rotation = target_affine[:3, :3] + # 将target_rotation绕其自身的Z轴转180度,得到target_rotation_2 + target_rotation_2 = np.dot(target_rotation, R_180) + + # 重新组合旋转矩阵target_rotation_2和原始的平移部分 + target_affine_2 = np.eye(4) + target_affine_2[:3, :3] = target_rotation_2 + target_affine_2[:3, 3] = target_affine[:3, 3] # 保留原始的平移部分 + return target_affine_2 + + +def rotate_along_axis(target_affine, angle_degrees, rot_axis='z', use_local=True): + ''' + 根据指定的角度和旋转轴来旋转target_affine。 + 参数: + - target_affine: 4x4 仿射变换矩阵 + - angle_degrees: 旋转角度(以度为单位) + - rot_axis: 旋转轴,'x'、'y' 或 'z' + ''' + # 将角度转换为弧度 + angle_radians = np.deg2rad(angle_degrees) + + # 创建旋转对象 + if rot_axis == 'z': + rotation_vector = np.array([0, 0, angle_radians]) + elif rot_axis == 'y': + rotation_vector = np.array([0, angle_radians, 0]) + elif rot_axis == 'x': + rotation_vector = np.array([angle_radians, 0, 0]) + else: + raise ValueError("Invalid rotation axis. Please choose from 'x', 'y', 'z'.") + + # 生成旋转矩阵 + R_angle = R.from_rotvec(rotation_vector).as_matrix() + + # 提取旋转部分(3x3矩阵) + target_rotation = target_affine[:3, :3] + + # 将 target_rotation 绕指定轴旋转指定角度,得到 target_rotation_2 + if use_local: + target_rotation_2 = np.dot(target_rotation, R_angle) + else: + target_rotation_2 = np.dot(R_angle, target_rotation) + + # 重新组合旋转矩阵 target_rotation_2 和原始的平移部分 + target_affine_2 = np.eye(4) + target_affine_2[:3, :3] = target_rotation_2 + target_affine_2[:3, 3] = target_affine[:3, 3] # 保留原始的平移部分 + + return target_affine_2 + + +def rotate_along_axis(target_affine, angle_degrees, rot_axis='z', use_local=True): + ''' + 根据指定的角度和旋转轴来旋转target_affine。 + 参数: + - target_affine: 4x4 仿射变换矩阵 + - angle_degrees: 旋转角度(以度为单位) + - rot_axis: 旋转轴,'x'、'y' 或 'z' + ''' + # 将角度转换为弧度 + angle_radians = np.deg2rad(angle_degrees) + + # 创建旋转对象 + if rot_axis == 'z': + rotation_vector = np.array([0, 0, angle_radians]) + elif rot_axis == 'y': + rotation_vector = np.array([0, angle_radians, 0]) + elif rot_axis == 'x': + rotation_vector 
= np.array([angle_radians, 0, 0]) + else: + raise ValueError("Invalid rotation axis. Please choose from 'x', 'y', 'z'.") + + # 生成旋转矩阵 + R_angle = R.from_rotvec(rotation_vector).as_matrix() + + # 提取旋转部分(3x3矩阵) + target_rotation = target_affine[:3, :3] + + # 将 target_rotation 绕指定轴旋转指定角度,得到 target_rotation_2 + if use_local: + target_rotation_2 = np.dot(target_rotation, R_angle) + else: + target_rotation_2 = np.dot(R_angle, target_rotation) + + # 重新组合旋转矩阵 target_rotation_2 和原始的平移部分 + target_affine_2 = np.eye(4) + target_affine_2[:3, :3] = target_rotation_2 + target_affine_2[:3, 3] = target_affine[:3, 3] # 保留原始的平移部分 + + return target_affine_2 + + +import numpy as np +from scipy.spatial.transform import Rotation as R + + +def get_quaternion_wxyz_from_rotation_matrix(rotation_matrix): + """ + Convert a 3x3 rotation matrix to a quaternion in the wxyz format. + + Parameters: + R (numpy array): A 3x3 rotation matrix. + + Returns: + numpy array: A 4x1 quaternion in the wxyz format. + """ + # Convert the rotation matrix to a quaternion + rot = R.from_matrix(rotation_matrix) + quat = rot.as_quat() + + # Reorder the quaternion to the wxyz format + if quat.shape[0] == 4: + quaternions_wxyz = quat[[3, 0, 1, 2]] + else: + quaternions_wxyz = quat[:, [3, 0, 1, 2]] + return quaternions_wxyz + + +def get_quaternion_from_rotation_matrix(R): + assert R.shape == (3, 3) + + # 计算四元数分量 + trace = np.trace(R) + if trace > 0: + S = np.sqrt(trace + 1.0) * 2 # S=4*qw + qw = 0.25 * S + qx = (R[2, 1] - R[1, 2]) / S + qy = (R[0, 2] - R[2, 0]) / S + qz = (R[1, 0] - R[0, 1]) / S + elif (R[0, 0] > R[1, 1]) and (R[0, 0] > R[2, 2]): + S = np.sqrt(1.0 + R[0, 0] - R[1, 1] - R[2, 2]) * 2 # S=4*qx + qw = (R[2, 1] - R[1, 2]) / S + qx = 0.25 * S + qy = (R[0, 1] + R[1, 0]) / S + qz = (R[0, 2] + R[2, 0]) / S + elif R[1, 1] > R[2, 2]: + S = np.sqrt(1.0 + R[1, 1] - R[0, 0] - R[2, 2]) * 2 # S=4*qy + qw = (R[0, 2] - R[2, 0]) / S + qx = (R[0, 1] + R[1, 0]) / S + qy = 0.25 * S + qz = (R[1, 2] + R[2, 1]) / S + else: + 
S = np.sqrt(1.0 + R[2, 2] - R[0, 0] - R[1, 1]) * 2 # S=4*qz + qw = (R[1, 0] - R[0, 1]) / S + qx = (R[0, 2] + R[2, 0]) / S + qy = (R[1, 2] + R[2, 1]) / S + qz = 0.25 * S + + return np.array([qw, qx, qy, qz]) + + +# @nb.jit(nopython=True) +def get_rotation_matrix_from_quaternion(quat: np.ndarray) -> np.ndarray: + """Convert a quaternion to a rotation matrix. + + Args: + quat (np.ndarray): A 4x1 vector in order (w, x, y, z) + + Returns: + np.ndarray: The resulting 3x3 rotation matrix. + """ + w, x, y, z = quat + rot = np.array( + [ + [2 * (w ** 2 + x ** 2) - 1, 2 * (x * y - w * z), 2 * (x * z + w * y)], + [2 * (x * y + w * z), 2 * (w ** 2 + y ** 2) - 1, 2 * (y * z - w * x)], + [2 * (x * z - w * y), 2 * (y * z + w * x), 2 * (w ** 2 + z ** 2) - 1], + ] + ) + return rot + + +# @nb.jit(nopython=True) +def get_xyz_euler_from_quaternion(quat: np.ndarray) -> np.ndarray: + """Convert a quaternion to XYZ euler angles. + + Args: + quat (np.ndarray): A 4x1 vector in order (w, x, y, z). + + Returns: + np.ndarray: A 3x1 vector containing (roll, pitch, yaw). + """ + w, x, y, z = quat + y_sqr = y * y + + t0 = +2.0 * (w * x + y * z) + t1 = +1.0 - 2.0 * (x * x + y_sqr) + eulerx = np.arctan2(t0, t1) + + t2 = +2.0 * (w * y - z * x) + t2 = +1.0 if t2 > +1.0 else t2 + t2 = -1.0 if t2 < -1.0 else t2 + eulery = np.arcsin(t2) + + t3 = +2.0 * (w * z + x * y) + t4 = +1.0 - 2.0 * (y_sqr + z * z) + eulerz = np.arctan2(t3, t4) + + result = np.zeros(3) + result[0] = eulerx + result[1] = eulery + result[2] = eulerz + + return result + + +# @nb.jit(nopython=True) +def get_quaternion_from_euler(euler: np.ndarray, order: str = "XYZ") -> np.ndarray: + """Convert an euler angle to a quaternion based on specified euler angle order. + + Supported Euler angle orders: {'XYZ', 'YXZ', 'ZXY', 'ZYX', 'YZX', 'XZY'}. + + Args: + euler (np.ndarray): A 3x1 vector with angles in radians. + order (str, optional): The specified order of input euler angles. Defaults to "XYZ". 
+ + Raises: + ValueError: If input order is not valid. + + Reference: + [1] https://github.com/mrdoob/three.js/blob/master/src/math/Quaternion.js + """ + # extract input angles + r, p, y = euler + # compute constants + y = y / 2.0 + p = p / 2.0 + r = r / 2.0 + c3 = np.cos(y) + s3 = np.sin(y) + c2 = np.cos(p) + s2 = np.sin(p) + c1 = np.cos(r) + s1 = np.sin(r) + # convert to quaternion based on order + if order == "XYZ": + result = np.array( + [ + c1 * c2 * c3 - s1 * s2 * s3, + c1 * s2 * s3 + c2 * c3 * s1, + c1 * c3 * s2 - s1 * c2 * s3, + c1 * c2 * s3 + s1 * c3 * s2, + ] + ) + if result[0] < 0: + result = -result + return result + elif order == "YXZ": + result = np.array( + [ + c1 * c2 * c3 + s1 * s2 * s3, + s1 * c2 * c3 + c1 * s2 * s3, + c1 * s2 * c3 - s1 * c2 * s3, + c1 * c2 * s3 - s1 * s2 * c3, + ] + ) + return result + elif order == "ZXY": + result = np.array( + [ + c1 * c2 * c3 - s1 * s2 * s3, + s1 * c2 * c3 - c1 * s2 * s3, + c1 * s2 * c3 + s1 * c2 * s3, + c1 * c2 * s3 + s1 * s2 * c3, + ] + ) + return result + elif order == "ZYX": + result = np.array( + [ + c1 * c2 * c3 + s1 * s2 * s3, + s1 * c2 * c3 - c1 * s2 * s3, + c1 * s2 * c3 + s1 * c2 * s3, + c1 * c2 * s3 - s1 * s2 * c3, + ] + ) + return result + elif order == "YZX": + result = np.array( + [ + c1 * c2 * c3 - s1 * s2 * s3, + s1 * c2 * c3 + c1 * s2 * s3, + c1 * s2 * c3 + s1 * c2 * s3, + c1 * c2 * s3 - s1 * s2 * c3, + ] + ) + return result + elif order == "XZY": + result = np.array( + [ + c1 * c2 * c3 + s1 * s2 * s3, + s1 * c2 * c3 - c1 * s2 * s3, + c1 * s2 * c3 - s1 * c2 * s3, + c1 * c2 * s3 + s1 * s2 * c3, + ] + ) + return result + else: + raise ValueError("Input euler angle order is meaningless.") + + +# @nb.jit(nopython=True) +def get_rotation_matrix_from_euler(euler: np.ndarray, order: str = "XYZ") -> np.ndarray: + quat = get_quaternion_from_euler(euler, order) + return get_rotation_matrix_from_quaternion(quat) + + +# @nb.jit(nopython=True) +def quat_multiplication(q: np.ndarray, p: np.ndarray) -> 
np.ndarray: + """Compute the product of two quaternions. + + Args: + q (np.ndarray): First quaternion in order (w, x, y, z). + p (np.ndarray): Second quaternion in order (w, x, y, z). + + Returns: + np.ndarray: A 4x1 vector representing a quaternion in order (w, x, y, z). + """ + quat = np.array( + [ + p[0] * q[0] - p[1] * q[1] - p[2] * q[2] - p[3] * q[3], + p[0] * q[1] + p[1] * q[0] - p[2] * q[3] + p[3] * q[2], + p[0] * q[2] + p[1] * q[3] + p[2] * q[0] - p[3] * q[1], + p[0] * q[3] - p[1] * q[2] + p[2] * q[1] + p[3] * q[0], + ] + ) + return quat + + +# @nb.jit(nopython=True) +def skew(vector: np.ndarray) -> np.ndarray: + """Convert vector to skew symmetric matrix. + + This function returns a skew-symmetric matrix to perform cross-product + as a matrix multiplication operation, i.e.: + + np.cross(a, b) = np.dot(skew(a), b) + + + Args: + vector (np.ndarray): A 3x1 vector. + + Returns: + np.ndarray: The resluting skew-symmetric matrix. + """ + mat = np.array([[0, -vector[2], vector[1]], [vector[2], 0, -vector[0]], [-vector[1], vector[0], 0]]) + return mat + + +import math + +_POLE_LIMIT = 1.0 - 1e-6 + + +def matrix_to_euler_angles(mat: np.ndarray, degrees: bool = False, extrinsic: bool = True) -> np.ndarray: + """Convert rotation matrix to Euler XYZ extrinsic or intrinsic angles. + + Args: + mat (np.ndarray): A 3x3 rotation matrix. + degrees (bool, optional): Whether returned angles should be in degrees. + extrinsic (bool, optional): True if the rotation matrix follows the extrinsic matrix + convention (equivalent to ZYX ordering but returned in the reverse) and False if it follows + the intrinsic matrix conventions (equivalent to XYZ ordering). + Defaults to True. + + Returns: + np.ndarray: Euler XYZ angles (intrinsic form) if extrinsic is False and Euler XYZ angles (extrinsic form) if extrinsic is True. 
+ """ + if extrinsic: + if mat[2, 0] > _POLE_LIMIT: + roll = np.arctan2(mat[0, 1], mat[0, 2]) + pitch = -np.pi / 2 + yaw = 0.0 + return np.array([roll, pitch, yaw]) + + if mat[2, 0] < -_POLE_LIMIT: + roll = np.arctan2(mat[0, 1], mat[0, 2]) + pitch = np.pi / 2 + yaw = 0.0 + return np.array([roll, pitch, yaw]) + + roll = np.arctan2(mat[2, 1], mat[2, 2]) + pitch = -np.arcsin(mat[2, 0]) + yaw = np.arctan2(mat[1, 0], mat[0, 0]) + if degrees: + roll = math.degrees(roll) + pitch = math.degrees(pitch) + yaw = math.degrees(yaw) + return np.array([roll, pitch, yaw]) + else: + if mat[0, 2] > _POLE_LIMIT: + roll = np.arctan2(mat[1, 0], mat[1, 1]) + pitch = np.pi / 2 + yaw = 0.0 + return np.array([roll, pitch, yaw]) + + if mat[0, 2] < -_POLE_LIMIT: + roll = np.arctan2(mat[1, 0], mat[1, 1]) + pitch = -np.pi / 2 + yaw = 0.0 + return np.array([roll, pitch, yaw]) + roll = -math.atan2(mat[1, 2], mat[2, 2]) + pitch = math.asin(mat[0, 2]) + yaw = -math.atan2(mat[0, 1], mat[0, 0]) + + if degrees: + roll = math.degrees(roll) + pitch = math.degrees(pitch) + yaw = math.degrees(yaw) + return np.array([roll, pitch, yaw]) diff --git a/runners/data_generator.py b/runners/data_generator.py index 1598d70..7d533e3 100644 --- a/runners/data_generator.py +++ b/runners/data_generator.py @@ -1,6 +1,7 @@ from pyboot import stereotype from pyboot.runner import Runner +from data_gen_dependencies.data_generate import generate_data @stereotype.runner("data_generator") class DataGenerator(Runner): def __init__(self, config_path: str): diff --git a/runners/task_generator.py b/runners/task_generator.py index e626052..031f1ce 100644 --- a/runners/task_generator.py +++ b/runners/task_generator.py @@ -1,5 +1,6 @@ import json import os +from typing import List from pyboot import stereotype from pyboot.runner import Runner @@ -15,17 +16,24 @@ class TaskGenerator(Runner): self.input_target_task_templates_path = self.generate_config["input_target_task_templates_path"] self.input_data_root = 
self.generate_config["input_data_root"] self.output_task_root_dir = self.generate_config.get("output_task_root_dir", None) + self.output_generate_result_path = self.generate_config.get("output_generate_result_path", None) if self.output_task_root_dir is None: self.output_task_root_dir = os.path.join(self.workspace_path, "task_root_dir") + if self.output_generate_result_path is None: + self.output_generate_result_path = os.path.join(self.workspace_path, "generate_result_path.json") self.target_task_templates = json.load(open(self.input_target_task_templates_path, "r")) def run(self): + generate_results = {} for task_template_name, task_template_path in self.target_task_templates.items(): task_template = json.load(open(task_template_path, "r")) - self.generate_from_template(task_template_name, task_template) - Log.success(f"Generated {task_template['recording_setting']['num_of_episode']} tasks from <{task_template_name}>") + generated_tasks_path_list = self.generate_from_template(task_template_name, task_template) + Log.success(f"Generated {len(generated_tasks_path_list)} tasks from <{task_template_name}>") + generate_results[task_template_name] = generated_tasks_path_list + json.dump(generate_results, open(self.output_generate_result_path, "w")) - def generate_from_template(self, template_name: str, template: dict): + + def generate_from_template(self, template_name: str, template: dict) -> List[str]: task_dir = os.path.join(self.output_task_root_dir, template_name) task_num = template["recording_setting"]["num_of_episode"] task_name = template["task"] @@ -33,4 +41,4 @@ class TaskGenerator(Runner): old_task_generator = OldTaskGenerator(template, self.input_data_root) if not os.path.exists(task_dir): os.makedirs(task_dir) - old_task_generator.generate_tasks(save_path=task_dir, task_num=task_num, task_name=task_name) + return old_task_generator.generate_tasks(save_path=task_dir, task_num=task_num, task_name=task_name) diff --git 
a/task_gen_dependencies/task_generate.py b/task_gen_dependencies/task_generate.py index e73f9e6..fd8ff7f 100644 --- a/task_gen_dependencies/task_generate.py +++ b/task_gen_dependencies/task_generate.py @@ -99,6 +99,7 @@ class OldTaskGenerator: def __init__(self, task_template, data_root): self.data_root = data_root self.init_info(task_template) + self.task_template = task_template def _load_json(self, relative_path): with open(os.path.join(self.data_root, relative_path), 'r') as file: @@ -224,10 +225,11 @@ class OldTaskGenerator: def generate_tasks(self, save_path, task_num, task_name): os.makedirs(save_path, exist_ok=True) - + generated_tasks_path = [] for i in range(task_num): task_instance = deepcopy(self.task_template) output_file = os.path.join(save_path, f'{task_name}_%d.json' % (i)) + generated_tasks_path.append(output_file) task_instance['objects'] = [] task_instance['objects'] += self.fix_obj_infos task_instance['robot'] = { @@ -236,6 +238,7 @@ class OldTaskGenerator: "robot_cfg": self.robot_cfg, "stand": self.stand, } + task_instance['recording_setting'] = self.task_template["recording_setting"] flag_failed = False for key in self.layouts: @@ -258,5 +261,5 @@ class OldTaskGenerator: Log.info('Saved task json to %s' % output_file) with open(output_file, 'w') as f: json.dump(task_instance, f, indent=4) - + return generated_tasks_path # ------------------------------------------------------------- \ No newline at end of file