init commit

This commit is contained in:
zyhe
2026-03-16 11:44:10 +00:00
commit 94384a93c9
552 changed files with 363038 additions and 0 deletions

View File

@@ -0,0 +1,131 @@
import numpy as np
def transform_point_cloud(T_homo, pc):
    # type: (np.ndarray, np.ndarray) -> np.ndarray
    """
    Transform a point cloud using a homogeneous transform.

    :param T_homo: (4, 4) homogeneous transformation matrix
    :param pc: (N, 3) point cloud
    :return: (N, 3) transformed point cloud (a new array; the input is not modified)
    """
    # (R @ pc.T).T == pc @ R.T; the translation column then broadcasts over all
    # N rows, replacing the original per-axis in-place additions.
    return pc.dot(T_homo[0:3, 0:3].T) + T_homo[0:3, 3]
def xyzrpy_to_matrix(xyzrpy):
    """
    Build a 4x4 homogeneous transform from a 6-vector [x, y, z, roll, pitch, yaw].
    """
    # Start from the identity: the bottom row [0, 0, 0, 1] is already correct.
    T = np.eye(4, dtype=xyzrpy.dtype)
    T[0:3, 0:3] = rpy_to_rotation_matrix(xyzrpy[3:6])
    T[0:3, 3] = xyzrpy[0:3]
    return T
def rpy_to_rotation_matrix(rpy):
    """
    Build a 3x3 rotation matrix from roll/pitch/yaw angles.
    See http://danceswithcode.net/engineeringnotes/rotations_in_3d/rotations_in_3d_part1.html
    """
    # Precompute the six trig values once instead of re-evaluating per entry.
    cu, su = np.cos(rpy[0]), np.sin(rpy[0])  # roll
    cv, sv = np.cos(rpy[1]), np.sin(rpy[1])  # pitch
    cw, sw = np.cos(rpy[2]), np.sin(rpy[2])  # yaw
    return np.array(
        [
            [cv * cw, su * sv * cw - cu * sw, su * sw + cu * sv * cw],
            [cv * sw, cu * cw + su * sv * sw, cu * sv * sw - su * cw],
            [-sv, su * cv, cu * cv],
        ],
        dtype=rpy.dtype,
    )
def transform_point(T, p):
    """
    Apply a homogeneous transform matrix T to a single 3D point.

    :param T: (4, 4) numpy array
    :param p: length-3 point
    :return: length-3 transformed point
    """
    # Lift to homogeneous coordinates, transform, then drop the w component.
    q_homog = T.dot(np.append(p, 1))
    return np.squeeze(q_homog)[0:3]
def transform_vec(T, v):
    """
    Rotate a 3D vector by the rotation part of a homogeneous transform T.
    The translation component of T is deliberately ignored.

    :param T: (4, 4) numpy array
    :param v: length-3 vector (any array-like)
    """
    return T[:3, :3].dot(np.array(v))
def xyzrpy_to_matrix_symbolic(xyzrpy):
    """
    Build a 4x4 homogeneous transform from [x, y, z, roll, pitch, yaw],
    delegating the rotation block to the symbolic (pydrake) rpy helper.
    """
    # Start from the identity so the bottom row [0, 0, 0, 1] is already set.
    T = np.eye(4, dtype=xyzrpy.dtype)
    T[0:3, 0:3] = rpy_to_rotation_matrix_symbolic(xyzrpy[3:6])
    T[0:3, 3] = xyzrpy[0:3]
    return T
def rpy_to_rotation_matrix_symbolic(rpy):
    """
    Build a 3x3 rotation matrix from symbolic roll/pitch/yaw values, using
    pydrake's cos/sin so entries may be symbolic expressions.
    See http://danceswithcode.net/engineeringnotes/rotations_in_3d/rotations_in_3d_part1.html
    """
    # Imported locally so pydrake is only required when symbolics are used.
    from pydrake.math import cos, sin
    # Evaluate each trig expression once and reuse it across matrix entries.
    cu, su = cos(rpy[0]), sin(rpy[0])  # roll
    cv, sv = cos(rpy[1]), sin(rpy[1])  # pitch
    cw, sw = cos(rpy[2]), sin(rpy[2])  # yaw
    R = np.zeros([3, 3], dtype=rpy.dtype)
    R[0, 0] = cv * cw
    R[0, 1] = su * sv * cw - cu * sw
    R[0, 2] = su * sw + cu * sv * cw
    R[1, 0] = cv * sw
    R[1, 1] = cu * cw + su * sv * sw
    R[1, 2] = cu * sv * sw - su * cw
    R[2, 0] = -sv
    R[2, 1] = su * cv
    R[2, 2] = cu * cv
    return R

View File

View File

@@ -0,0 +1,75 @@
task_name: CloseBox
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
# ensure the gripper to be in contact with the box lid with whole fingers
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.000001
type: point2point_constraint
name: fingers_contact_with_link0
# ensure surface of the gripper to be parallel to the box lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: object_axis
target_axis_frame: object
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: hand_parallel_to_link0_edge
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis: [1.0, 0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: 0
# type: frame_axis_orthogonal
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis_from_keypoint_name: articulated_object_head
# target_axis_to_keypoint_name: articulated_object_tail
# tolerance: 0.01
# target_inner_product: 0
# type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
cross_target_axis1_from_keypoint_name: articulated_object_head
cross_target_axis1_to_keypoint_name: articulated_object_tail
target_axis: object_axis
target_axis_frame: object # cross_target_axis2_frame
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: fingers_orthogonal_to_link0
# for pre-actuation and post-actuation motions.
# Each motion is represented in the [mode,value] format
# mode: translate or rotate
# value: [x,y,z] in the tool frame for translate or radian for rotate
# Units are in meters and radians respectively.
contact_pose_index: 2
# pre-actuation pose list.
pre_actuation_motions: [ ["translate_z", -0.03]] # ["translate_z", -0.0001] , ["translate_x", -0.01],
# post-actuation pose list
post_actuation_motions: [ ["rotate",0.3],["rotate",0.2], ["rotate",0.1], ["rotate",0.1], ["rotate",0.1]] # ["translate_z", 0.08],
# trajectory time
# actuation_time: 24 # time to reach task goal pose
# pre_actuation_times: [16, 20]
# post_actuation_times: [26, 32]
actuation_time: 6 # time to reach task goal pose
# pre_actuation_times: [4, 8]
# post_actuation_times: [16, 20]
pre_actuation_times: [4] # 6
post_actuation_times: [10,12] # 6,

View File

@@ -0,0 +1,83 @@
task_name: CloseBox
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
# ensure the gripper to be in contact with the box lid with whole fingers
- keypoint_name: tool_head
target_keypoint_name: link_contact_point
tolerance: 0.000001
type: point2point_constraint
name: fingers_contact_with_link0
# ensure surface of the gripper to be parallel to the box lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: object_link0_contact_axis
target_axis_frame: object
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: hand_parallel_to_link0_edge
- axis_from_keypoint_name: tool_tail
axis_to_keypoint_name: tool_head
target_axis: object_link0_move_axis
target_axis_frame: object
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: hand_parallel_to_link0_move_axis
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis: [1.0, 0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: 0
# type: frame_axis_orthogonal
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis_from_keypoint_name: articulated_object_head
# target_axis_to_keypoint_name: articulated_object_tail
# tolerance: 0.01
# target_inner_product: 0
# type: keypoint_axis_orthogonal
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# cross_target_axis1_from_keypoint_name: articulated_object_head
# cross_target_axis1_to_keypoint_name: articulated_object_tail
# target_axis: object_axis
# target_axis_frame: object # cross_target_axis2_frame
# tolerance: 0.05
# target_inner_product: 1
# type: frame_axis_parallel #
# name: fingers_orthogonal_to_link0
# for pre-actuation and post-actuation motions.
# Each motion is represented in the [mode,value] format
# mode: translate or rotate
# value: [x,y,z] in the tool frame for translate or radian for rotate
# Units are in meters and radians respectively.
contact_pose_index: 1
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.05]] # ["translate_z", -0.0001] , ["translate_x", -0.01],
# post-actuation pose list
post_actuation_motions: [["translate_z", 0.3],["translate_z", 0.1],["translate_z", 0.05],["translate_z", 0.05]] # ["translate_z", 0.08],
# trajectory time
# actuation_time: 24 # time to reach task goal pose
# pre_actuation_times: [16, 20]
# post_actuation_times: [26, 32]
actuation_time: 6 # time to reach task goal pose
# pre_actuation_times: [4, 8]
# post_actuation_times: [16, 20]
pre_actuation_times: [4] # 6
post_actuation_times: [10, 12] # 6,

View File

@@ -0,0 +1,81 @@
task_name: CloseBox
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
# ensure the gripper to be in contact with the box lid with whole fingers
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.000001
type: point2point_constraint
name: fingers_contact_with_link0
# ensure surface of the gripper to be parallel to the box lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: link0_contact_axis
target_axis_frame: object
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: hand_parallel_to_link0_edge
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis: [1.0, 0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: 0
# type: frame_axis_orthogonal
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis_from_keypoint_name: articulated_object_head
# target_axis_to_keypoint_name: articulated_object_tail
# tolerance: 0.01
# target_inner_product: 0
# type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
cross_target_axis1_from_keypoint_name: articulated_object_head
cross_target_axis1_to_keypoint_name: articulated_object_tail
target_axis: link0_contact_axis
target_axis_frame: object # cross_target_axis2_frame
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: fingers_orthogonal_to_link0
# for pre-actuation and post-actuation motions.
# Each motion is represented in the [mode,value] format
# mode: translate or rotate
# value: [x,y,z] in the tool frame for translate or radian for rotate
# Units are in meters and radians respectively.
contact_pose_index: 2
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.03]] # ["translate_z", -0.0001] , ["translate_x", -0.01],
# post-actuation pose list
post_actuation_motions: [
["rotate", 0.3],
["rotate", 0.2],
["rotate", 0.1],
["rotate", 0.1],
["rotate", 0.1],
["rotate", 0.1],
["rotate", 0.1],
] # ["translate_z", 0.08],
# trajectory time
# actuation_time: 24 # time to reach task goal pose
# pre_actuation_times: [16, 20]
# post_actuation_times: [26, 32]
actuation_time: 6 # time to reach task goal pose
# pre_actuation_times: [4, 8]
# post_actuation_times: [16, 20]
pre_actuation_times: [4] # 6
post_actuation_times: [10, 12] # 6,

View File

@@ -0,0 +1,76 @@
task_name: CloseBox
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
# ensure the gripper to be in contact with the box lid with whole fingers
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.000001
type: point2point_constraint
name: fingers_contact_with_link0
# ensure surface of the gripper to be parallel to the box lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: link0_contact_axis
target_axis_frame: object
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: hand_parallel_to_link0_edge
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis: [1.0, 0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: 0
# type: frame_axis_orthogonal
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_tail
# target_axis_from_keypoint_name: articulated_object_head
# target_axis_to_keypoint_name: articulated_object_tail
# tolerance: 0.01
# target_inner_product: 0
# type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
cross_target_axis1_from_keypoint_name: articulated_object_head
cross_target_axis1_to_keypoint_name: articulated_object_tail
target_axis: link0_contact_axis
target_axis_frame: object # cross_target_axis2_frame
tolerance: 0.05
target_inner_product: 1
type: frame_axis_parallel
name: fingers_orthogonal_to_link0
# for pre-actuation and post-actuation motions.
# Each motion is represented in the [mode,value] format
# mode: translate or rotate
# value: [x,y,z] in the tool frame for translate or radian for rotate
# Units are in meters and radians respectively.
contact_pose_index: 2
# pre-actuation pose list.
pre_actuation_motions: [ ["translate_z", -0.03]] # ["translate_z", -0.0001] , ["translate_x", -0.01],
# post-actuation pose list
post_actuation_motions: [ ["rotate",0.7],["rotate",0.3], ["rotate",0.2], ["rotate",0.1], ["rotate",0.1]] # ["translate_z", 0.08],
# trajectory time
# actuation_time: 24 # time to reach task goal pose
# pre_actuation_times: [16, 20]
# post_actuation_times: [26, 32]
actuation_time: 6 # time to reach task goal pose
# pre_actuation_times: [4, 8]
# post_actuation_times: [16, 20]
pre_actuation_times: [4] # 6
post_actuation_times: [10,12] # 6,

View File

@@ -0,0 +1,46 @@
task_name: CloseLaptop
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
# for pre-actuation and post-actuation poses relative to the tool.
# Z axis positive points from gripper to fingers and X axis points to the front direction.
# Each pose is represented in the [[x,y,z,roll,pitch,yaw]] format
# Units are in meters and radians respectively.
# pre-actuation pose list.
pre_actuation_motions: [["translate_x", 0.06], ["translate_z", -0.15]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [["translate_z", 0.05], ["translate_x", -0.25]] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12, 16]
post_actuation_times: [22, 30]

View File

@@ -0,0 +1,42 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-right
- move-forward
- move-forward
pre_actuation_motions:
- - translate_x
- -0.1
- - translate_z
- -0.15
task_name: CloseMicrowave
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,42 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-right
- move-forward
- move-forward
pre_actuation_motions:
- - translate_x
- -0.05
- - translate_z
- -0.15
task_name: CloseRefrigeratorDoor
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 0
- 1.0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- - translate_x
- 0.2
pre_actuation_motions:
- - translate_x
- -0.1
- - translate_z
- -0.2
task_name: CloseSafe
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,56 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
target_inner_product: -1
tolerance: 0.01
type: keypoint_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
target_inner_product: 0
tolerance: 0.01
type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
- articulated_object_tail
post_actuation_motions:
- move-down
- move-backward
pre_actuation_motions:
- - translate_x
- 0.1
- - translate_z
- -0.15
task_name: CloseSuitcaseLid
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_tail
axis_to_keypoint_name: tool_head
target_axis:
- 0
- 0
- -1
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 0
- 1
target_axis_frame: world
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-up
- move-up
pre_actuation_motions:
- - translate_z
- -0.2
- - translate_x
- -0.1
task_name: LiftBucketUpright
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,40 @@
task_name: MoveAboveBucket
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_above]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_above
tolerance: 0.0001
type: point2point_constraint
# ensure the gripper surface faces a fixed world-frame direction (stale "laptop lid" comment removed)
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.1], ["translate_y", 0.2],] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [15, 20]
post_actuation_times: []

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-forward
- move-up
pre_actuation_motions:
- - translate_x
- -0.05
- - translate_z
- -0.15
task_name: MoveBagForward
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,40 @@
task_name: MoveInMicrowave
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_inside_base]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_inside_base
tolerance: 0.0001
type: point2point_constraint
# constrain the gripper orientation while reaching into the microwave (stale "laptop lid" comment removed)
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 0.707
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
# pre-actuation pose list.
pre_actuation_motions: [["translate_x", -0.1], ["translate_z", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [20, 25]
post_actuation_times: []

View File

@@ -0,0 +1,40 @@
task_name: MoveInsideBox
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_inside_base]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_inside_base
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.1], ["translate_y", 0.2],] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [15, 20]
post_actuation_times: []

View File

@@ -0,0 +1,40 @@
task_name: MoveOnLaptop
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_surface_base]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_surface_base
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.1], ["translate_y", 0.2],] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [15, 20]
post_actuation_times: []

View File

@@ -0,0 +1,40 @@
task_name: MoveToDrawer
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_inside]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_inside
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.1], ["translate_y", 0.2],] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [15, 20]
post_actuation_times: []

View File

@@ -0,0 +1,40 @@
task_name: MoveToRefrigerator
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_inside_base]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_inside_base
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_x", -0.2], ["translate_y", 0.2],] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [15, 20]
post_actuation_times: []

View File

@@ -0,0 +1,40 @@
task_name: MoveUnderFaucet
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_bottom_base]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_bottom_base
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_y", -0.15], ["translate_x", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 30 # time to reach task goal pose
pre_actuation_times: [20, 25]
post_actuation_times: []

View File

@@ -0,0 +1,62 @@
task_name: OpenBox
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
# make sure the robot gripper hits the box lid
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the box lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
tolerance: 0.01
target_inner_product: -1
type: keypoint_axis_parallel
# ensure gripper to point directly to the direction of the box
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
tolerance: 0.01
target_inner_product: 0
type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
# for pre-actuation and post-actuation poses relative to the tool.
# Z axis positive points from gripper to fingers and X axis points to the front direction.
# Each pose is represented in the [[x,y,z,roll,pitch,yaw]] format
# Units are in meters and radians respectively.
# pre-actuation motion list.
pre_actuation_motions: [["translate_x", -0.05], ["translate_z", -0.15]] # [move gripper below lid, adjust gripper direction and approach]
# post-actuation motion list. pushing down
post_actuation_motions: [["rotate", -0.5]] # [lift lid up]
# trajectory time
actuation_time: 24 # time to reach task goal pose
pre_actuation_times: [16, 20]
post_actuation_times: [32]

View File

@@ -0,0 +1 @@
{'task_name': 'OpenDrawer', 'category_name': 'Articulated', 'tool_keypoint_name_list': ['tool_head', 'tool_tail', 'tool_side'], 'object_keypoint_name_list': ['articulated_object_head'], 'constraint_list': [{'keypoint_name': 'tool_head', 'target_keypoint_name': 'articulated_object_head', 'tolerance': 0.0001, 'type': 'point2point_constraint'}, {'axis_from_keypoint_name': 'tool_head', 'axis_to_keypoint_name': 'tool_side', 'target_axis': [1.0, 0, 0], 'target_axis_frame': 'object', 'tolerance': 0.01, 'target_inner_product': 1, 'type': 'frame_axis_parallel'}, {'axis_from_keypoint_name': 'tool_head', 'axis_to_keypoint_name': 'tool_tail', 'target_axis': [0, 0, 1.0], 'target_axis_frame': 'object', 'tolerance': 0.01, 'target_inner_product': 0, 'type': 'frame_axis_orthogonal'}], 'pre_actuation_motions': [['translate_x', 0.1], ['translate_z', -0.1]], 'post_actuation_motions': ['move-backward']}

View File

@@ -0,0 +1,62 @@
task_name: OpenLaptop
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
tolerance: 0.01
target_inner_product: -1
type: keypoint_axis_parallel
# ensure gripper to point directly to the direction of the laptop
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
tolerance: 0.01
target_inner_product: 0
type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
# for pre-actuation and post-actuation poses relative to the tool.
# Z axis positive points from gripper to fingers and X axis points to the front direction.
# Each motion is represented in the [mode, value] format, e.g. ["translate_z", -0.15].
# Units are in meters and radians respectively.
# pre-actuation pose list.
pre_actuation_motions: [["translate_x", -0.05], ["translate_z", -0.15]] # [adjust gripper direction and approach, move gripper below lid]
# post-actuation pose list. pushing down
post_actuation_motions: [["translate_z", 0.05], ["translate_x", 0.15]] # [lift lid up]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12, 16]
post_actuation_times: [22, 30]

View File

@@ -0,0 +1,9 @@
{category_name: Articulated, constraint_list: [{keypoint_name: tool_tail, target_keypoint_name: articulated_object_head,
tolerance: 0.0001, type: point2point_constraint}, {axis_from_keypoint_name: tool_head,
axis_to_keypoint_name: tool_side, target_axis: [0, 1.0, 0], target_axis_frame: object,
target_inner_product: -1, tolerance: 0.01, type: frame_axis_parallel}, {axis_from_keypoint_name: tool_head,
axis_to_keypoint_name: tool_tail, target_axis: [1.0, 0, 0], target_axis_frame: object,
target_inner_product: 1, tolerance: 0.01, type: frame_axis_parallel}], object_keypoint_name_list: [
articulated_object_head], post_actuation_motions: [[translate_x, -0.5]], pre_actuation_motions: [
[translate_x, 0.1], [translate_z, -0.15]], task_name: OpenRefrigeratorDoor, tool_keypoint_name_list: [
tool_head, tool_tail, tool_side]}

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-right
- move-right
pre_actuation_motions:
- - translate_x
- 0.1
- - translate_z
- -0.15
task_name: OpenSafe
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1 @@
{'task_name': 'PressToasterLever', 'category_name': 'Articulated', 'tool_keypoint_name_list': ['tool_head', 'tool_tail', 'tool_side'], 'object_keypoint_name_list': ['articulated_object_head'], 'constraint_list': [{'keypoint_name': 'tool_side', 'target_keypoint_name': 'articulated_object_head', 'tolerance': 0.0001, 'type': 'point2point_constraint'}, {'axis_from_keypoint_name': 'tool_head', 'axis_to_keypoint_name': 'tool_side', 'target_axis': [1.0, 0, 0], 'target_axis_frame': 'object', 'tolerance': 0.01, 'target_inner_product': 1, 'type': 'frame_axis_parallel'}, {'axis_from_keypoint_name': 'tool_head', 'axis_to_keypoint_name': 'tool_tail', 'target_axis': [0, 0, 1.0], 'target_axis_frame': 'object', 'tolerance': 0.01, 'target_inner_product': 0, 'type': 'frame_axis_orthogonal'}], 'pre_actuation_motions': [['translate_z', -0.15]], 'post_actuation_motions': ['move-down', 'move-down']}

View File

@@ -0,0 +1,42 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 0
- 1.0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-forward
- move-left
- move-forward
pre_actuation_motions:
- - translate_x
- -0.05
- - translate_z
- -0.15
task_name: PushBox
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1 @@
{'task_name': 'PushDrawerClose', 'category_name': 'Articulated', 'tool_keypoint_name_list': ['tool_head', 'tool_tail', 'tool_side'], 'object_keypoint_name_list': ['articulated_object_head'], 'constraint_list': [{'keypoint_name': 'tool_head', 'target_keypoint_name': 'articulated_object_head', 'tolerance': 0.0001, 'type': 'point2point_constraint'}, {'axis_from_keypoint_name': 'tool_head', 'axis_to_keypoint_name': 'tool_side', 'target_axis': [1.0, 0, 0], 'target_axis_frame': 'object', 'tolerance': 0.01, 'target_inner_product': 1, 'type': 'frame_axis_parallel'}, {'axis_from_keypoint_name': 'tool_head', 'axis_to_keypoint_name': 'tool_tail', 'target_axis': [0, 0, 1.0], 'target_axis_frame': 'object', 'tolerance': 0.01, 'target_inner_product': 0, 'type': 'frame_axis_orthogonal'}], 'pre_actuation_motions': [['translate_x', -0.05], ['translate_z', -0.05]], 'post_actuation_motions': [['translate_x', 0.13]], 'actuation_time': 14, 'pre_actuation_times': [7, 9], 'post_actuation_times': [17]}

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 0
- 1.0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-forward
- move-forward
pre_actuation_motions:
- - translate_x
- -0.05
- - translate_z
- -0.1
task_name: push-oven-close
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 0
- 1.0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-forward
- move-backward
pre_actuation_motions:
- - translate_x
- -0.05
- - translate_z
- -0.15
task_name: PushToasterForward
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,42 @@
task_name: ReachBall
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: rigidbody_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 1.0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.3]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12]
post_actuation_times: []

View File

@@ -0,0 +1,42 @@
task_name: ReachCuboid
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: rigidbody_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12]
post_actuation_times: []

View File

@@ -0,0 +1,42 @@
task_name: ReachRigidBody
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: rigidbody_object_head
tolerance: 0.001
type: point2point_constraint
# # ensure surface of the gripper to be parallel to the laptop lid
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_side
# target_axis: [1.0, 0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: -1
# type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [["translate_z", 0.05]] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12]
post_actuation_times: [24]

View File

@@ -0,0 +1,42 @@
task_name: ReachRigidBody
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_side
target_keypoint_name: rigidbody_object_head
tolerance: 0.001
type: point2point_constraint
# # ensure surface of the gripper to be parallel to the laptop lid
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_side
# target_axis: [1.0, 0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: -1
# type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12]
post_actuation_times: []

View File

@@ -0,0 +1,49 @@
task_name: ReachRigidBody
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: rigidbody_object_head
tolerance: 0.001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
# - axis_from_keypoint_name: tool_head
# axis_to_keypoint_name: tool_side
# target_axis: [0, 1.0, 0]
# target_axis_frame: object
# tolerance: 0.01
# target_inner_product: 1
# type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.05]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [["translate_z", 0.05]] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [15]
post_actuation_times: [25]

View File

@@ -0,0 +1,41 @@
task_name: ReachThinObject
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: rigidbody_object_head
tolerance: 0.001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0.0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: 0
type: frame_axis_orthogonal
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [["translate_z", 0.02]] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12]
post_actuation_times: []

View File

@@ -0,0 +1,42 @@
task_name: ReachXAxis
category_name: RigidBody
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [rigidbody_object_head]
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: rigidbody_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure surface of the gripper to be parallel to the laptop lid
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
target_inner_product: -1
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# pre-actuation pose list.
pre_actuation_motions: [["translate_z", -0.2]] # [adjust gripper direction and approach, move gripper above lid]
# post-actuation pose list.
post_actuation_motions: [] # [press lid down]
# trajectory time
actuation_time: 20 # time to reach task goal pose
pre_actuation_times: [12]
post_actuation_times: []

View File

@@ -0,0 +1,40 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_tail
axis_to_keypoint_name: tool_head
target_axis:
- 0
- 1
- 0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-down
- move-forward
- move-up
pre_actuation_motions:
- - translate_z
- -0.05
task_name: RelocateSuitcase
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,41 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 0
- 1.0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-left
- move-backward
pre_actuation_motions:
- - translate_x
- 0.12
- - translate_z
- -0.2
task_name: RotateMicrowaveDoor
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,40 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 0
- 1.0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-down
pre_actuation_motions:
- - translate_x
- 0.1
- - translate_z
- -0.15
task_name: RotateOvenKnob
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,38 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-forward
pre_actuation_motions:
- - translate_z
- -0.05
task_name: sway-bag-strap
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,47 @@
actuation_time: 20
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_tail
axis_to_keypoint_name: tool_head
target_axis:
- 0
- 0
- 1.0
target_axis_frame: world
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- - translate_z
- 0.05
- - translate_x
- 0.1
post_actuation_times:
- 24
- 28
pre_actuation_motions:
- - translate_z
- -0.1
pre_actuation_times:
- 15
task_name: SwingBucketHandle
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,39 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 0
- 1
- 0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1
- 0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-right
- move-right
pre_actuation_motions:
- - translate_z
- -0.05
task_name: SwingDoorOpen
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,55 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
target_inner_product: -1
tolerance: 0.01
type: keypoint_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis_from_keypoint_name: articulated_object_head
target_axis_to_keypoint_name: articulated_object_tail
target_inner_product: 0
tolerance: 0.01
type: keypoint_axis_orthogonal
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: 0
tolerance: 0.01
type: frame_axis_orthogonal
object_keypoint_name_list:
- articulated_object_head
- articulated_object_tail
post_actuation_motions:
- move-forward
pre_actuation_motions:
- - translate_x
- -0.05
- - translate_z
- -0.1
task_name: swing-suitcase-lid-open
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,40 @@
category_name: Articulated
constraint_list:
- keypoint_name: tool_tail
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis:
- 0
- 1.0
- 0
target_axis_frame: object
target_inner_product: 1
tolerance: 0.01
type: frame_axis_parallel
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis:
- 1.0
- 0
- 0
target_axis_frame: object
target_inner_product: -1
tolerance: 0.01
type: frame_axis_parallel
object_keypoint_name_list:
- articulated_object_head
post_actuation_motions:
- move-forward
- move-left
- move-backward
pre_actuation_motions:
- - translate_z
- -0.05
task_name: toggle-door-close
tool_keypoint_name_list:
- tool_head
- tool_tail
- tool_side

View File

@@ -0,0 +1,65 @@
task_name: TurnOffFaucet
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head, articulated_object_tail]
constraint_list:
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure the gripper to point straight to the table
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
type: frame_axis_parallel
target_inner_product: 1
# ensure the axis between gripper finger tips to be parallel to the table
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 0, 1.0]
  target_axis_frame: world
tolerance: 0.01
type: frame_axis_orthogonal
target_inner_product: 0
# ensure the axis between gripper finger tips to be orthogonal to the faucet handle
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 0, 1.0]
target_axis_frame: object
tolerance: 0.01
type: frame_axis_orthogonal
target_inner_product: 0
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
type: frame_axis_parallel
target_inner_product: 1
# for pre-actuation and post-actuation poses relative to the tool.
# Z axis positive points from gripper to fingers and X axis points to the front direction.
# Each pose is represented in the [[x,y,z,roll,pitch,yaw]] format
# Units are in meters and radians respectively.
# pre-actuation pose list.
pre_actuation_motions: [["translate_y", -0.1], ["translate_z", -0.15]] # [move above handle]
# post-actuation pose list. pushing down
post_actuation_motions: [["translate_z", 0.05], ["translate_y", 0.08]] # [turn the handle by 90 degree from left to right, turn the handle by 180 degree from left to right]
# trajectory time
actuation_time: 16 # time to reach task goal pose
pre_actuation_times: [8, 12]
post_actuation_times: [20, 24]

View File

@@ -0,0 +1,66 @@
task_name: TurnOnFaucet
category_name: Articulated
tool_keypoint_name_list: [tool_head, tool_tail, tool_side]
object_keypoint_name_list: [articulated_object_head]
constraint_list:
# ensure the gripper to hold the faucet handle with fingers
- keypoint_name: tool_head
target_keypoint_name: articulated_object_head
tolerance: 0.0001
type: point2point_constraint
# ensure the gripper to point straight to the table
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_tail
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
target_inner_product: 1
type: frame_axis_parallel
# ensure the axis between gripper finger tips to be parallel to the table
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 0, 1.0]
target_axis_frame: world
tolerance: 0.01
type: frame_axis_orthogonal
target_inner_product: 0
# ensure the axis between gripper finger tips to be orthogonal to the faucet handle
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [0, 0, 1.0]
target_axis_frame: object
tolerance: 0.01
type: frame_axis_orthogonal
target_inner_product: 0
- axis_from_keypoint_name: tool_head
axis_to_keypoint_name: tool_side
target_axis: [1.0, 0, 0]
target_axis_frame: object
tolerance: 0.01
type: frame_axis_parallel
target_inner_product: 1
# for pre-actuation and post-actuation motions.
# Each motion is represented in the [mode,value] format
# mode: translate or rotate
# value: [x,y,z] in the tool frame for translate or radian for rotate
# Units are in meters and radians respectively.
# pre-actuation pose list.
pre_actuation_motions: [["translate_y", 0.12], ["translate_z", -0.15]] # [move above handle]
# post-actuation pose list. pushing down
post_actuation_motions: [["translate_z", 0.05], ["translate_y", -0.08], ["translate_x", 0.05]] # [turn the handle by 90 degree from left to right, turn the handle by 180 degree from left to right]
# trajectory time
actuation_time: 16 # time to reach task goal pose
pre_actuation_times: [8, 12]
post_actuation_times: [20, 24, 28]

View File

@@ -0,0 +1,277 @@
[
{
"constraint_list": [
{
"keypoint_name": "tool_tail",
"target_keypoint_name": "articulated_object_head",
"tolerance": 0.0001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis_from_keypoint_name": "articulated_object_head",
"target_axis_to_keypoint_name": "articulated_object_tail",
"tolerance": 0.01,
"target_inner_product": -1,
"type": "keypoint_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis_from_keypoint_name": "articulated_object_head",
"target_axis_to_keypoint_name": "articulated_object_tail",
"tolerance": 0.01,
"target_inner_product": 0,
"type": "keypoint_axis_orthogonal"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 0,
"type": "frame_axis_orthogonal"
}
]
},
{
"constraint_list": [
{
"keypoint_name": "tool_head",
"target_keypoint_name": "articulated_object_head",
"tolerance": 0.0001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
0,
0,
1.0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 0,
"type": "frame_axis_orthogonal"
}
]
},
{
"constraint_list": [
{
"keypoint_name": "tool_tail",
"target_keypoint_name": "articulated_object_head",
"tolerance": 0.0001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
0,
0,
1.0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 0,
"type": "frame_axis_orthogonal"
}
]
},
{
"constraint_list": [
{
"keypoint_name": "tool_head",
"target_keypoint_name": "rigidbody_object_head",
"tolerance": 0.0001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": -1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
0,
0,
1.0
],
"target_axis_frame": "world",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
}
]
},
{
"constraint_list": [
{
"keypoint_name": "tool_head",
"target_keypoint_name": "rigidbody_object_head",
"tolerance": 0.001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
0,
1.0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": -1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
0,
0,
1.0
],
"target_axis_frame": "world",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
}
]
},
{
"constraint_list": [
{
"keypoint_name": "tool_tail",
"target_keypoint_name": "articulated_object_head",
"tolerance": 0.0001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
0,
1.0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": -1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
}
]
},
{
"constraint_list": [
{
"keypoint_name": "tool_tail",
"target_keypoint_name": "articulated_object_head",
"tolerance": 0.0001,
"type": "point2point_constraint"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_side",
"target_axis": [
0,
1.0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": 1,
"type": "frame_axis_parallel"
},
{
"axis_from_keypoint_name": "tool_head",
"axis_to_keypoint_name": "tool_tail",
"target_axis": [
1.0,
0,
0
],
"target_axis_frame": "object",
"tolerance": 0.01,
"target_inner_product": -1,
"type": "frame_axis_parallel"
}
]
}
]

View File

@@ -0,0 +1,13 @@
[
{
"post_actuation_motions": ["move-backward", "move-left"]
},
{
"post_actuation_motions": ["move-backward"]
},
{
"post_actuation_motions": ["move-forward", "move-up"]
}
]

View File

@@ -0,0 +1,21 @@
[
{
"pre_actuation_motions": [["translate_x", -0.1], ["translate_z", -0.15]]
},
{
"pre_actuation_motions": [["translate_x", 0.1], ["translate_z", -0.1]]
},
{
"pre_actuation_motions": [["translate_z", -0.1]]
},
{
"pre_actuation_motions": [["translate_z", -0.15], ["translate_x", -0.1]]
},
{
"pre_actuation_motions": [["translate_z", -0.15], ["translate_x", 0.1]]
}
]

View File

@@ -0,0 +1,319 @@
import functools
import numpy as np
from solver.kpam import mp_terms, term_spec
from solver.kpam.optimization_problem import OptimizationProblemkPAM
from solver.kpam.transformations import affine_matrix_from_points
class OptimizationBuilderkPAM(object):
    """
    Translate an optimization specification plus one keypoint observation
    into a drake MathematicalProgram wrapped in OptimizationProblemkPAM.

    NOTE(review): this builder reads self._specification.keypoint_name2idx
    and get_nominal_target_position(), and dispatches on spec classes named
    KeypointAxisAlignmentConstraintSpec / AxisAlignmentConstraintSpec with a
    target_position field on Point2PointConstraintSpec — confirm the term_spec
    / specification modules actually expose these (some revisions rename or
    comment them out).
    """
    def __init__(self, specification):
        # The task specification providing keypoint indexing plus the
        # cost and constraint term lists.
        self._specification = specification
    def _check_and_get_keypoint_idx(self, keypoint_name, input_idx=-1):
        # type: (str, int) -> int
        """
        Given the keypoint name, return the index for that keypoint.
        If input_idx is valid (non-negative), also verify the two indices
        match each other.
        :param keypoint_name: must be a key of the spec's keypoint_name2idx
        :param input_idx: optional redundant index for consistency checking
        :return: the keypoint's row index into the observation array
        """
        assert keypoint_name in self._specification.keypoint_name2idx
        idx = self._specification.keypoint_name2idx[keypoint_name]
        if input_idx >= 0:
            # A caller-provided index must agree with the specification
            assert idx == input_idx
        return idx
    def build_optimization(self, keypoint_observation): # type: (np.ndarray) -> OptimizationProblemkPAM
        """
        Given the observed keypoints, build a mathematical program whose
        solution is the transform that maps keypoint_observation to target.
        :param keypoint_observation: (N, 3) keypoint locations, one row per
            keypoint in the specification
        :return: an OptimizationProblemkPAM ready to be solved
        """
        # Basic check
        assert keypoint_observation.shape[1] == 3
        assert keypoint_observation.shape[0] == len(self._specification.keypoint_name2idx)
        # Empty initialization of optimization problem
        problem = OptimizationProblemkPAM()
        problem.T_init = np.eye(4)
        problem.mp = None
        problem.xyzrpy = None
        problem.has_solution = False
        # Build the initial transformation (identity if too few targets)
        problem.T_init = self._build_initial_transformation(keypoint_observation)
        # Build the mathematical_program with decision variable
        problem.build_empty_mp()
        # Process the cost terms
        for cost in self._specification.cost_list:
            self._process_cost_term(problem, keypoint_observation, cost)
        # Process the constraint terms
        for constraint in self._specification.constraint_list:
            self._process_constraint_term(problem, keypoint_observation, constraint)
        return problem
    def _build_initial_transformation(self, keypoint_observation): # type: (np.ndarray) -> np.ndarray
        """
        Compute an initial 4x4 pose guess by fitting a rigid transform from
        observed keypoints to their nominal targets; identity when fewer
        than 3 keypoints have a nominal target.
        """
        # The container for all keypoint that has nominal target
        from_keypoint = []
        to_keypoint = []
        # Get all the keypoint that has nominal target
        keypoint_name2idx = self._specification.keypoint_name2idx
        for keypoint_name in keypoint_name2idx:
            exist_target, location = self._specification.get_nominal_target_position(keypoint_name)
            if exist_target:
                to_keypoint.append(location)
                idx = keypoint_name2idx[keypoint_name]
                from_keypoint.append(
                    [
                        keypoint_observation[idx, 0],
                        keypoint_observation[idx, 1],
                        keypoint_observation[idx, 2],
                    ]
                )
        # Depends on whether we have enough keypoints
        assert len(from_keypoint) == len(to_keypoint)
        if len(from_keypoint) < 3:
            return np.eye(4)
        # There exist enough keypoints: fit a rigid transform (Kabsch-style
        # SVD fitting via affine_matrix_from_points with shear/scale disabled)
        # The np arrays are in (3, N) format of keypoints
        from_keypoint_np = np.zeros(shape=(3, len(from_keypoint)))
        to_keypoint_np = np.zeros_like(from_keypoint_np)
        for i in range(len(from_keypoint)):
            for j in range(3):
                from_keypoint_np[j, i] = from_keypoint[i][j]
                to_keypoint_np[j, i] = to_keypoint[i][j]
        T = affine_matrix_from_points(from_keypoint_np, to_keypoint_np, shear=False, scale=False, usesvd=True)
        return T
    def _process_cost_term(self, problem, keypoint_observation, cost):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.OptimizationTermSpec) -> None
        """
        Global dispatcher for the cost term
        :param problem: the problem being built (mp must exist already)
        :param cost: a cost-type term specification
        :return:
        """
        if isinstance(cost, term_spec.Point2PointCostL2Spec):
            self._process_keypoint_l2_cost_term(problem, keypoint_observation, cost)
        elif isinstance(cost, term_spec.Point2PlaneCostSpec):
            self._process_point2plane_cost_term(problem, keypoint_observation, cost)
        else:
            raise RuntimeError("The cost term type is unknown/not implemented")
    def _process_constraint_term(self, problem, keypoint_observation, constraint):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.OptimizationTermSpec) -> None
        """
        Global dispatcher for constraint term
        :param problem:
        :param keypoint_observation:
        :param constraint:
        :return:
        """
        # NOTE(review): the spec classes checked here (AxisAlignment /
        # KeypointAxisAlignment / KeypointAxisOrthogonal) must exist in
        # term_spec; the config loader elsewhere produces *Parallel* /
        # FrameAxis* specs — verify the dispatch covers what is loaded.
        if isinstance(constraint, term_spec.Point2PointConstraintSpec):
            self._process_point2point_constraint_term(problem, keypoint_observation, constraint)
        elif isinstance(constraint, term_spec.AxisAlignmentConstraintSpec):
            self._process_axis_alignment_constraint_term(problem, constraint)
        elif isinstance(constraint, term_spec.KeypointAxisAlignmentConstraintSpec):
            self._process_keypoint_axisalign_constraint_term(problem, keypoint_observation, constraint)
        elif isinstance(constraint, term_spec.KeypointAxisOrthogonalConstraintSpec):
            self._process_keypoint_axisorthogonal_constraint_term(problem, keypoint_observation, constraint)
        else:
            raise RuntimeError("The constraint type is unknown/not implemented")
    def _process_keypoint_l2_cost_term(self, problem, keypoint_observation, cost):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.Point2PointCostL2Spec) -> None
        """Add a weighted L2 point-to-point cost for one keypoint."""
        idx = self._check_and_get_keypoint_idx(cost.keypoint_name, cost.keypoint_idx)
        # The current location
        from_keypoints = keypoint_observation[idx, :]
        from_keypoints = np.reshape(from_keypoints, (1, 3))
        # The target location
        to_keypoints = np.zeros(shape=(1, 3))
        to_keypoints[0, 0] = cost.target_position[0]
        to_keypoints[0, 1] = cost.target_position[1]
        to_keypoints[0, 2] = cost.target_position[2]
        # The cost function.
        # It is weird that the cost term cannot use a symbolic expression,
        # so a numeric callback is bound with functools.partial instead.
        cost_func = functools.partial(
            mp_terms.keypoint_l2_cost,
            problem,
            from_keypoints,
            to_keypoints,
            cost.penalty_weight,
        )
        problem.mp.AddCost(cost_func, problem.xyzrpy)
    def _process_point2plane_cost_term(self, problem, keypoint_observation, cost):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.Point2PlaneCostSpec) -> None
        """Add a weighted point-to-plane cost for one keypoint."""
        idx = self._check_and_get_keypoint_idx(cost.keypoint_name, cost.keypoint_idx)
        # The current location
        from_keypoints = keypoint_observation[idx, :]
        from_keypoints = np.reshape(from_keypoints, (1, 3))
        # The target location
        to_keypoints = np.zeros(shape=(1, 3))
        to_keypoints[0, 0] = cost.target_position[0]
        to_keypoints[0, 1] = cost.target_position[1]
        to_keypoints[0, 2] = cost.target_position[2]
        # The cost function.
        # It is weird that the cost term cannot use a symbolic expression,
        # so a numeric callback is bound with functools.partial instead.
        cost_func = functools.partial(
            mp_terms.point2plane_cost,
            problem,
            from_keypoints,
            to_keypoints,
            cost.penalty_weight,
            cost.plane_normal,
        )
        problem.mp.AddCost(cost_func, problem.xyzrpy)
    def _process_point2point_constraint_term(self, problem, keypoint_observation, constraint):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.Point2PointConstraintSpec) -> None
        """
        Box-constrain the transformed keypoint to lie within tolerance of
        the target position on each axis.
        """
        idx = self._check_and_get_keypoint_idx(constraint.keypoint_name, constraint.keypoint_idx)
        # The current location
        from_keypoint = keypoint_observation[idx, :]
        transformed_point_symbolic_val = mp_terms.transform_point_symbolic(problem, from_keypoint, problem.xyzrpy)
        # The target location
        # NOTE(review): requires constraint.target_position — some revisions
        # of Point2PointConstraintSpec carry target_keypoint_name instead.
        to_keypoints = np.asarray(constraint.target_position).copy()
        to_keypoints_lb = to_keypoints - constraint.tolerance
        to_keypoints_ub = to_keypoints + constraint.tolerance
        for j in range(3):
            problem.mp.AddConstraint(transformed_point_symbolic_val[j] >= to_keypoints_lb[j])
            problem.mp.AddConstraint(transformed_point_symbolic_val[j] <= to_keypoints_ub[j])
    @staticmethod
    def _process_axis_alignment_constraint_term(problem, constraint):
        # type: (OptimizationProblemkPAM, term_spec.AxisAlignmentConstraintSpec) -> None
        """
        Constrain dot(R * from_axis, target_axis) to lie within tolerance
        of target_inner_product; both axes are normalized first.
        """
        assert constraint.tolerance < 1
        assert constraint.tolerance > 0
        # Check the axis
        from_axis = np.asarray(constraint.from_axis, dtype=np.float32).copy() # type: np.ndarray
        to_axis = np.asarray(constraint.target_axis, dtype=np.float32).copy() # type: np.ndarray
        norm_from = np.linalg.norm(from_axis)
        norm_to = np.linalg.norm(to_axis)
        if norm_from < 1e-4 or norm_to < 1e-4:
            print("Warning: the axis is zero, the constraint is not added to optimization.")
            return
        # Do normalization
        from_axis *= 1.0 / norm_from
        to_axis *= 1.0 / norm_to
        # Build the term
        vector_dot_symbolic_val = mp_terms.vector_dot_symbolic(problem, from_axis, to_axis, problem.xyzrpy)
        # Add both sides of the tolerance band to the program
        problem.mp.AddConstraint(
            vector_dot_symbolic_val >= constraint.target_inner_product - constraint.tolerance
        )
        problem.mp.AddConstraint(
            vector_dot_symbolic_val <= constraint.target_inner_product + constraint.tolerance
        )
    def _process_keypoint_axisalign_constraint_term(self, problem, keypoint_observation, constraint):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.KeypointAxisAlignmentConstraintSpec) -> None
        """
        Build the axis from two observed keypoints, then delegate to the
        axis-alignment constraint handler.
        """
        from_idx = self._check_and_get_keypoint_idx(
            constraint.axis_from_keypoint_name, constraint.axis_from_keypoint_idx
        )
        to_idx = self._check_and_get_keypoint_idx(constraint.axis_to_keypoint_name, constraint.axis_to_keypoint_idx)
        # Compute the axis
        from_point = keypoint_observation[from_idx, :]
        to_point = keypoint_observation[to_idx, :]
        vector = to_point - from_point
        norm_vec = np.linalg.norm(vector)
        if norm_vec < 1e-4:
            print("Warning: the axis is zero, the constraint is not added to optimization.")
            return
        vector *= 1.0 / norm_vec
        # Build axis alignment constraint
        axis_constraint = term_spec.AxisAlignmentConstraintSpec()
        axis_constraint.from_axis = [vector[0], vector[1], vector[2]]
        axis_constraint.target_axis = constraint.target_axis
        axis_constraint.tolerance = constraint.tolerance
        axis_constraint.target_inner_product = constraint.target_inner_product
        # OK
        self._process_axis_alignment_constraint_term(problem, axis_constraint)
    @staticmethod
    def _process_axis_orthogonal_constraint_term(problem, constraint):
        # type: (OptimizationProblemkPAM, term_spec.AxisAlignmentConstraintSpec) -> None
        """
        Same tolerance-band constraint as axis alignment, written as
        |dot - target_inner_product| <= tolerance; used for orthogonality
        (target_inner_product near 0).
        """
        assert constraint.tolerance < 1
        assert constraint.tolerance > 0
        # Check the axis
        from_axis = np.asarray(constraint.from_axis, dtype=np.float32).copy() # type: np.ndarray
        to_axis = np.asarray(constraint.target_axis, dtype=np.float32).copy() # type: np.ndarray
        norm_from = np.linalg.norm(from_axis)
        norm_to = np.linalg.norm(to_axis)
        if norm_from < 1e-4 or norm_to < 1e-4:
            print("Warning: the axis is zero, the constraint is not added to optimization.")
            return
        # Do normalization
        from_axis *= 1.0 / norm_from
        to_axis *= 1.0 / norm_to
        # Build the term
        vector_dot_symbolic_val = mp_terms.vector_dot_symbolic(problem, from_axis, to_axis, problem.xyzrpy)
        # Add both sides of the tolerance band to the program
        problem.mp.AddConstraint(
            vector_dot_symbolic_val - constraint.target_inner_product <= constraint.tolerance
        )
        problem.mp.AddConstraint(
            vector_dot_symbolic_val - constraint.target_inner_product >= -constraint.tolerance
        )
    def _process_keypoint_axisorthogonal_constraint_term(self, problem, keypoint_observation, constraint):
        # type: (OptimizationProblemkPAM, np.ndarray, term_spec.KeypointAxisOrthogonalConstraintSpec) -> None
        """
        Build the axis from two observed keypoints, then delegate to the
        axis-orthogonal constraint handler.
        """
        from_idx = self._check_and_get_keypoint_idx(
            constraint.axis_from_keypoint_name, constraint.axis_from_keypoint_idx
        )
        to_idx = self._check_and_get_keypoint_idx(constraint.axis_to_keypoint_name, constraint.axis_to_keypoint_idx)
        # Compute the axis
        from_point = keypoint_observation[from_idx, :]
        to_point = keypoint_observation[to_idx, :]
        vector = to_point - from_point
        norm_vec = np.linalg.norm(vector)
        if norm_vec < 1e-4:
            print("Warning: the axis is zero, the constraint is not added to optimization.")
            return
        vector *= 1.0 / norm_vec
        # Build axis alignment constraint (reused spec type; the orthogonal
        # handler only reads the shared fields)
        axis_constraint = term_spec.AxisAlignmentConstraintSpec()
        axis_constraint.from_axis = [vector[0], vector[1], vector[2]]
        axis_constraint.target_axis = constraint.target_axis
        axis_constraint.tolerance = constraint.tolerance
        axis_constraint.target_inner_product = constraint.target_inner_product
        # OK
        self._process_axis_orthogonal_constraint_term(problem, axis_constraint)
def test_builder():
"""Legacy test entry; no longer used."""
raise RuntimeError("test_builder is deprecated and should not be called.")

View File

@@ -0,0 +1,75 @@
import numpy as np
from solver.kpam import SE3_utils
from solver.kpam.optimization_problem import OptimizationProblemkPAM
def keypoint_l2_cost(problem, from_keypoints, goal_keypoints, weight, xyzrpy):
    # type: (OptimizationProblemkPAM, np.ndarray, np.ndarray, float, np.ndarray) -> np.ndarray
    """
    Weighted sum of squared distances between transformed and goal keypoints.

    The candidate pose is problem.T_init composed with the xyzrpy delta;
    every source keypoint is mapped through it and compared to its goal.
    :param from_keypoints: (N, 3) np.ndarray with float
    :param goal_keypoints: (N, 3) np.ndarray with float
    :param weight: scalar penalty weight applied to the summed squared error
    :param xyzrpy: np.ndarray with potentially symbolic variable
    :return: The cost value
    """
    T_candidate = problem.T_init.dot(SE3_utils.xyzrpy_to_matrix(xyzrpy))
    total_cost = 0.0
    for row in range(from_keypoints.shape[0]):
        mapped = SE3_utils.transform_point(T_candidate, from_keypoints[row, :])
        residual = mapped - goal_keypoints[row, :]
        total_cost += np.dot(residual, residual)
    return weight * total_cost
def point2plane_cost(problem, from_keypoints, goal_keypoints, weight, plane_normal, xyzrpy):
    """
    Weighted sum of squared point-to-plane distances.

    Each transformed keypoint's offset from its goal is projected onto
    plane_normal, so only the out-of-plane component is penalized.
    """
    T_candidate = problem.T_init.dot(SE3_utils.xyzrpy_to_matrix(xyzrpy))
    total_cost = 0.0
    for row in range(from_keypoints.shape[0]):
        mapped = SE3_utils.transform_point(T_candidate, from_keypoints[row, :])
        offset = mapped - goal_keypoints[row, :]
        normal_component = np.dot(offset, plane_normal)
        total_cost += np.dot(normal_component, normal_component)
    return weight * total_cost
def vector_dot_symbolic(problem, from_axis, to_axis, xyzrpy):
    # type: (OptimizationProblemkPAM, np.ndarray, np.ndarray, np.ndarray) -> np.ndarray
    """
    Symbolic expression for dot(T.dot(from_axis), to_axis).

    Only the rotation block of T = T_init * T(xyzrpy) acts on the axis.
    :param problem: holds the initial transform T_init
    :param from_axis: 3-vector to rotate
    :param to_axis: 3-vector to dot against
    :param xyzrpy: decision variables (possibly symbolic)
    :return: scalar (symbolic) inner product
    """
    T_candidate = problem.T_init.dot(SE3_utils.xyzrpy_to_matrix_symbolic(xyzrpy))
    rotated_axis = T_candidate[0:3, 0:3].dot(from_axis)
    return np.dot(rotated_axis, to_axis)
def transform_point_symbolic(problem, from_keypoint, xyzrpy):
    # type: (OptimizationProblemkPAM, np.ndarray, np.ndarray) -> np.ndarray
    """
    Map one keypoint through the (symbolic) candidate pose T_init * T(xyzrpy).
    :param problem: holds the initial transform T_init
    :param from_keypoint: np.ndarray with float
    :param xyzrpy: np.ndarray with symbolic variable
    :return: transformed 3-vector (entries may be symbolic)
    """
    T_candidate = problem.T_init.dot(SE3_utils.xyzrpy_to_matrix_symbolic(xyzrpy))
    return SE3_utils.transform_point(T_candidate, from_keypoint)

View File

@@ -0,0 +1,77 @@
import numpy as np
from pydrake.all import MathematicalProgram, Solve
from solver.kpam import SE3_utils
class OptimizationProblemkPAM(object):
    """
    Container bundling the drake MathematicalProgram for one kPAM pose
    optimization together with its initial guess and solution fields.
    """
    # The property about initial transformation
    # There is always an initial guess, either identity or from keypoint matching
    # NOTE(review): np.ndarray(shape=(4, 4)) allocates uninitialized memory,
    # and these are class-level attributes shared until an instance assigns
    # them — callers are expected to overwrite before use.
    T_init = np.ndarray(shape=(4, 4)) # type: np.ndarray
    # The final mathematical program
    mp = None # type: MathematicalProgram
    xyzrpy = None # type: np.ndarray
    # The solution to the program
    has_solution = False
    xyzrpy_sol = np.ndarray(shape=(6,))
    # T_action.dot(observed_keypoint) = target_keypoint
    T_action = np.ndarray(shape=(4, 4))
    # Build empty problem
    def build_empty_mp(self):
        """
        Create a fresh MathematicalProgram with a 6-dof xyzrpy decision
        vector (initial guess all zeros) and store it on this object.
        """
        # Construct the problem
        mp = MathematicalProgram()
        xyz = mp.NewContinuousVariables(3, "xyz")
        rpy = mp.NewContinuousVariables(3, "rpy")
        xyz_rpy = np.concatenate((xyz, rpy))
        mp.SetInitialGuessForAllVariables(np.zeros(6))
        # Store the result to problem
        self.mp = mp
        self.xyzrpy = xyz_rpy
class OptimizationProblemkPAMJoint(object):
    """
    Problem container for the joint kPAM formulation.

    NOTE(review): currently an exact duplicate of OptimizationProblemkPAM;
    kept as a separate class, presumably so joint-specific fields can be
    added later — confirm before consolidating.
    """
    # The property about initial transformation
    # There is always an initial guess, either identity or from keypoint matching
    # NOTE(review): class-level, uninitialized ndarray defaults — overwrite
    # before use (see OptimizationProblemkPAM).
    T_init = np.ndarray(shape=(4, 4)) # type: np.ndarray
    # The final mathematical program
    mp = None # type: MathematicalProgram
    xyzrpy = None # type: np.ndarray
    # The solution to the program
    has_solution = False
    xyzrpy_sol = np.ndarray(shape=(6,))
    # T_action.dot(observed_keypoint) = target_keypoint
    T_action = np.ndarray(shape=(4, 4))
    # Build empty problem
    def build_empty_mp(self):
        """
        Create a fresh MathematicalProgram with a 6-dof xyzrpy decision
        vector (initial guess all zeros) and store it on this object.
        """
        # Construct the problem
        mp = MathematicalProgram()
        xyz = mp.NewContinuousVariables(3, "xyz")
        rpy = mp.NewContinuousVariables(3, "rpy")
        xyz_rpy = np.concatenate((xyz, rpy))
        mp.SetInitialGuessForAllVariables(np.zeros(6))
        # Store the result to problem
        self.mp = mp
        self.xyzrpy = xyz_rpy
def solve_kpam(problem): # type: (OptimizationProblemkPAM) -> bool
    """
    Solve the kPAM program stored in `problem` via drake's Solve().

    On success the raw solution vector, the action transform
    T_action = T_init * T(xyzrpy_sol), and the has_solution flag are
    written back onto `problem`. Returns True on success.
    """
    result = Solve(problem.mp)
    if not result.is_success():
        # Record the failure so callers can inspect the problem object
        problem.has_solution = False
        return False
    # The decision variables are exactly the 6-dof xyzrpy vector
    solution = result.get_x_val()
    problem.xyzrpy_sol = solution
    delta_transform = SE3_utils.xyzrpy_to_matrix(xyzrpy=solution)
    problem.T_action = problem.T_init.dot(delta_transform)
    problem.has_solution = True
    return True

View File

@@ -0,0 +1,219 @@
import copy
from typing import List
import yaml
from solver.kpam import term_spec
from solver.kpam.term_spec import OptimizationTermSpec
class OptimizationProblemSpecification(object):
    """
    The class serves as the interface between the
    config file and solver: it holds the task metadata, keypoint name
    lists, and the cost/constraint term specifications.
    """
    def __init__(
        self,
        task_name="", # type: str
        category_name="", # type: str
        tool_keypoint_name_list=None, # type: List[str]
        object_keypoint_name_list=None, # type: List[str]
    ):
        """
        The optimization spec can be constructed in python code
        or loaded from a yaml config. In the latter case, these
        parameters can be left default and load_from_config used instead.
        :param task_name:
        :param category_name:
        :param tool_keypoint_name_list: keypoint names on the tool
        :param object_keypoint_name_list: keypoint names on the object
        """
        self._task_name = task_name # type: str
        self._category_name = category_name # type: str
        # The default construction of list (avoids mutable default args)
        if tool_keypoint_name_list is not None:
            self._tool_keypoint_name_list = tool_keypoint_name_list # type: List[str]
        else:
            self._tool_keypoint_name_list = []
        if object_keypoint_name_list is not None:
            self._object_keypoint_name_list = object_keypoint_name_list # type: List[str]
        else:
            self._object_keypoint_name_list = []
        # By default, nothing here
        self._cost_list = [] # type: List[OptimizationTermSpec]
        self._constraint_list = [] # type: List[OptimizationTermSpec]
        # # Build from keypoint name list
        # self._keypoint_name2idx = OrderedDict() # type: Dict[str, int]
        # self.setup_keypoint_mapping()
        # The container for explicit nominal position
        # The nominal position can either explicit declared
        # Or implicit added using Point2PointCost/Constraint
        # self._keypoint_nominal_target_position = OrderedDict() # type: Dict[str, List[float]]
    # def setup_keypoint_mapping(self):
    #     self._keypoint_name2idx.clear()
    #     for i in range(len(self._keypoint_name_list)):
    #         name = self._keypoint_name_list[i]
    #         self._keypoint_name2idx[name] = i
    # The access interface
    @property
    def task_name(self):
        # Read-only task name from config
        return self._task_name
    @property
    def category_name(self):
        # Read-only object category name from config
        return self._category_name
    # @property
    # def keypoint_name2idx(self):
    #     return self._keypoint_name2idx
    @property
    def cost_list(self):
        # The list of cost term specs (mutated via add_cost)
        return self._cost_list
    @property
    def constraint_list(self):
        # The list of constraint term specs (mutated via add_constraint)
        return self._constraint_list
    # # The method to manipulate nominal target
    # def add_nominal_target_position(self, keypoint_name, nominal_target): # type: (str, List[float]) -> bool
    #     # Check the existence of keypoint
    #     if keypoint_name not in self._keypoint_name2idx:
    #         return False
    #     # Check the shape of target
    #     if len(nominal_target) != 3:
    #         return False
    #     # OK
    #     self._keypoint_nominal_target_position[keypoint_name] = nominal_target
    #     return True
    # def get_nominal_target_position(self, keypoint_name): # type: (str) -> (bool, List[float])
    #     # If explicitly defined
    #     if keypoint_name in self._keypoint_nominal_target_position:
    #         return True, self._keypoint_nominal_target_position[keypoint_name]
    #     # Else search for constraints
    #     for cost_term in self.cost_list:
    #         if isinstance(cost_term, Point2PointCostL2Spec):
    #             if cost_term.keypoint_name == keypoint_name:
    #                 return True, cost_term.target_position
    #     for constraint_term in self.constraint_list:
    #         if isinstance(constraint_term, Point2PointConstraintSpec):
    #             if constraint_term.keypoint_name == keypoint_name:
    #                 return True, constraint_term.target_position
    #     # Not available
    #     return False, []
    # The method to modify the specification from python
    def add_cost(self, cost_term): # type: (OptimizationTermSpec) -> bool
        """Deep-copy and store a cost term; False if it is not a cost."""
        if not cost_term.is_cost():
            return False
        copied = copy.deepcopy(cost_term)
        self._cost_list.append(copied)
        return True
    def add_constraint(self, constraint_term): # type: (OptimizationTermSpec) -> bool
        """Deep-copy and store a constraint term; False if it is a cost."""
        if constraint_term.is_cost():
            return False
        copied = copy.deepcopy(constraint_term)
        self._constraint_list.append(copied)
        return True
    def add_optimization_term(self, optimization_term): # type: (OptimizationTermSpec) -> bool
        """Route a term to add_cost or add_constraint based on is_cost()."""
        if optimization_term.is_cost():
            return self.add_cost(optimization_term)
        else:
            return self.add_constraint(optimization_term)
    # The interface from/to yaml
    def write_to_yaml(self, yaml_save_path): # type: (str) -> None
        """
        Serialize the task metadata and constraint list to a yaml file.
        Note: cost terms are currently NOT serialized (code commented out).
        """
        data_map = {}
        data_map["task_name"] = self._task_name
        data_map["category_name"] = self._category_name
        data_map["tool_keypoint_name_list"] = self._tool_keypoint_name_list
        data_map["object_keypoint_name_list"] = self._object_keypoint_name_list
        # # For cost terms
        # cost_map_list = []
        # for cost in self._cost_list:
        #     cost_i_map = cost.to_dict()
        #     cost_i_map["type"] = cost.type_name()
        #     cost_map_list.append(cost_i_map)
        # data_map["cost_list"] = cost_map_list
        # For constraint terms
        constraint_map_list = []
        for constraint in self._constraint_list:
            constraint_i_map = constraint.to_dict()
            # The "type" tag is what load_from_config dispatches on
            constraint_i_map["type"] = constraint.type_name()
            constraint_map_list.append(constraint_i_map)
        data_map["constraint_list"] = constraint_map_list
        # Save to yaml
        with open(yaml_save_path, mode="w", encoding="utf-8") as save_file:
            yaml.dump(data_map, save_file)
    def load_from_config(self, data_map): # type: (dict) -> bool
        """
        Populate this specification from an already-parsed config mapping.
        Replaces the constraint list; raises RuntimeError on an unknown
        constraint "type" tag. Cost parsing is currently disabled.
        """
        # Basic meta
        self._task_name = data_map["task_name"]
        self._category_name = data_map["category_name"]
        self._tool_keypoint_name_list = data_map["tool_keypoint_name_list"]
        self._object_keypoint_name_list = data_map["object_keypoint_name_list"]
        # self._keypoint_name2idx.clear()
        # self.setup_keypoint_mapping()
        # For cost terms
        # cost_map_list = data_map["cost_list"]
        # self._cost_list = []
        # for cost in cost_map_list:
        #     cost_type = cost["type"] # type: str
        #     if cost_type == term_spec.Point2PointCostL2Spec.type_name():
        #         cost_spec = term_spec.Point2PointCostL2Spec()
        #         cost_spec.from_dict(cost)
        #         self._cost_list.append(cost_spec)
        #     elif cost_type == term_spec.Point2PlaneCostSpec.type_name():
        #         cost_spec = term_spec.Point2PlaneCostSpec()
        #         cost_spec.from_dict(cost)
        #         self._cost_list.append(cost_spec)
        #     else:
        #         raise RuntimeError("Unknown cost type %s" % cost_type)
        # For constraint terms
        constraint_map_list = data_map["constraint_list"]
        self._constraint_list = []
        for constraint in constraint_map_list:
            constraint_type = constraint["type"]
            if constraint_type == term_spec.Point2PointConstraintSpec.type_name():
                constraint_spec = term_spec.Point2PointConstraintSpec()
                constraint_spec.from_dict(constraint)
                self._constraint_list.append(constraint_spec)
            elif constraint_type == term_spec.KeypointAxisParallelConstraintSpec.type_name():
                constraint_spec = term_spec.KeypointAxisParallelConstraintSpec()
                constraint_spec.from_dict(constraint)
                self._constraint_list.append(constraint_spec)
            elif constraint_type == term_spec.KeypointAxisOrthogonalConstraintSpec.type_name():
                constraint_spec = term_spec.KeypointAxisOrthogonalConstraintSpec()
                constraint_spec.from_dict(constraint)
                self._constraint_list.append(constraint_spec)
            elif constraint_type == term_spec.FrameAxisParallelConstraintSpec.type_name():
                constraint_spec = term_spec.FrameAxisParallelConstraintSpec()
                constraint_spec.from_dict(constraint)
                self._constraint_list.append(constraint_spec)
            elif constraint_type == term_spec.FrameAxisOrthogonalConstraintSpec.type_name():
                constraint_spec = term_spec.FrameAxisOrthogonalConstraintSpec()
                constraint_spec.from_dict(constraint)
                self._constraint_list.append(constraint_spec)
            else:
                raise RuntimeError(f"Unknown constraint type {constraint_type}")
        return True

View File

@@ -0,0 +1,294 @@
from typing import List
import attr
# A very thin base class for all
# the specification of the terms
class OptimizationTermSpec(object):
    """
    Thin abstract base for all cost/constraint term specifications.

    Subclasses identify themselves via type_name(), declare whether they
    act as a cost or a hard constraint via is_cost(), and (de)serialize
    their fields through to_dict()/from_dict().
    """
    @staticmethod
    def type_name(): # type: () -> str
        """Config-file tag of the concrete term; subclasses must override."""
        raise NotImplementedError
    @staticmethod
    def is_cost(): # type: () -> bool
        """True when the term is a cost, False for a hard constraint."""
        raise NotImplementedError
    def to_dict(self):
        """Serialize this spec into a plain dictionary."""
        raise NotImplementedError
    def from_dict(self, data_map):
        """Populate this spec from a plain dictionary."""
        raise NotImplementedError
# For the specification of constraint types:
# @attr.s
# class Point2PointConstraintSpec(OptimizationTermSpec):
# # 1 - tolerance <= dot(from, to) <= 1
# keypoint_name = "" # type: str
# keypoint_idx = -1 # type: int
# target_position = [0, 0, 0] # type: List[float]
# tolerance = 0.001 # type: float
# @staticmethod
# def type_name():
# return "point2point_constraint"
# @staticmethod
# def is_cost(): # type: () -> bool
# return False
# def to_dict(self):
# constraint_dict = dict()
# constraint_dict["keypoint_name"] = self.keypoint_name
# constraint_dict["keypoint_idx"] = self.keypoint_idx
# constraint_dict["target_position"] = self.target_position
# constraint_dict["tolerance"] = self.tolerance
# return constraint_dict
# def from_dict(self, data_map):
# self.keypoint_name = data_map["keypoint_name"]
# self.keypoint_idx = data_map["keypoint_idx"]
# self.target_position = data_map["target_position"]
# self.tolerance = data_map["tolerance"]
@attr.s
class Point2PointConstraintSpec(OptimizationTermSpec):
    """
    Pin one transformed keypoint to a target keypoint within a positional
    tolerance.
    """
    keypoint_name = ""  # type: str
    keypoint_idx = -1  # type: int
    target_keypoint_name = ""  # type: str
    target_keypoint_idx = -1  # type: int
    tolerance = 0.001  # type: float
    @staticmethod
    def type_name():
        """Config-file tag for this constraint type."""
        return "point2point_constraint"
    @staticmethod
    def is_cost():  # type: () -> bool
        """Hard constraint, not a cost."""
        return False
    def to_dict(self):
        """Serialize the fields persisted by the config format."""
        return {
            "keypoint_name": self.keypoint_name,
            "target_keypoint_name": self.target_keypoint_name,
            "tolerance": self.tolerance,
        }
    def from_dict(self, data_map):
        """Populate this spec from a parsed config dictionary."""
        self.keypoint_name = data_map["keypoint_name"]
        self.target_keypoint_name = data_map["target_keypoint_name"]
        self.tolerance = data_map["tolerance"]
@attr.s
class KeypointAxisParallelConstraintSpec(OptimizationTermSpec):
    """
    Require the axis spanned by two observed keypoints to stay parallel
    (inner product near target_inner_product) to the axis spanned by two
    target keypoints.
    """
    axis_from_keypoint_name = ""  # type: str
    axis_to_keypoint_name = ""  # type: str
    target_axis_from_keypoint_name = ""  # type: str
    target_axis_to_keypoint_name = ""  # type: str
    tolerance = 0.1  # type: float
    target_inner_product = 1
    @staticmethod
    def type_name():
        """Config-file tag for this constraint type."""
        return "keypoint_axis_parallel"
    @staticmethod
    def is_cost():  # type: () -> bool
        """Hard constraint, not a cost."""
        return False
    def to_dict(self):
        """Serialize all fields into a plain dictionary."""
        return {
            "axis_from_keypoint_name": self.axis_from_keypoint_name,
            "axis_to_keypoint_name": self.axis_to_keypoint_name,
            "target_axis_from_keypoint_name": self.target_axis_from_keypoint_name,
            "target_axis_to_keypoint_name": self.target_axis_to_keypoint_name,
            "tolerance": self.tolerance,
            "target_inner_product": self.target_inner_product,
        }
    def from_dict(self, data_map):
        """Populate every field from a parsed config dictionary."""
        self.axis_from_keypoint_name = data_map["axis_from_keypoint_name"]
        self.axis_to_keypoint_name = data_map["axis_to_keypoint_name"]
        self.target_axis_from_keypoint_name = data_map["target_axis_from_keypoint_name"]
        self.target_axis_to_keypoint_name = data_map["target_axis_to_keypoint_name"]
        self.tolerance = data_map["tolerance"]
        self.target_inner_product = data_map["target_inner_product"]
@attr.s
class KeypointAxisOrthogonalConstraintSpec(OptimizationTermSpec):
    """
    Require the axis spanned by two observed keypoints to stay orthogonal
    (inner product near target_inner_product, default 0) to the axis
    spanned by two target keypoints.
    """
    axis_from_keypoint_name = ""  # type: str
    axis_to_keypoint_name = ""  # type: str
    target_axis_from_keypoint_name = ""  # type: str
    target_axis_to_keypoint_name = ""  # type: str
    tolerance = 0.01  # type: float
    target_inner_product = 0
    @staticmethod
    def type_name():
        """Config-file tag for this constraint type."""
        return "keypoint_axis_orthogonal"
    @staticmethod
    def is_cost():  # type: () -> bool
        """Hard constraint, not a cost."""
        return False
    def to_dict(self):
        """Serialize all fields into a plain dictionary."""
        return {
            "axis_from_keypoint_name": self.axis_from_keypoint_name,
            "axis_to_keypoint_name": self.axis_to_keypoint_name,
            "target_axis_from_keypoint_name": self.target_axis_from_keypoint_name,
            "target_axis_to_keypoint_name": self.target_axis_to_keypoint_name,
            "tolerance": self.tolerance,
            "target_inner_product": self.target_inner_product,
        }
    def from_dict(self, data_map):
        """Populate every field from a parsed config dictionary."""
        self.axis_from_keypoint_name = data_map["axis_from_keypoint_name"]
        self.axis_to_keypoint_name = data_map["axis_to_keypoint_name"]
        self.target_axis_from_keypoint_name = data_map["target_axis_from_keypoint_name"]
        self.target_axis_to_keypoint_name = data_map["target_axis_to_keypoint_name"]
        self.tolerance = data_map["tolerance"]
        self.target_inner_product = data_map["target_inner_product"]
@attr.s
class FrameAxisOrthogonalConstraintSpec(OptimizationTermSpec):
    """
    Require the axis spanned by two observed keypoints to stay orthogonal
    to a fixed axis expressed in target_axis_frame.
    """
    axis_from_keypoint_name = ""  # type: str
    axis_to_keypoint_name = ""  # type: str
    # NOTE(review): class-level list default is shared between instances
    # until reassigned — from_dict always replaces it.
    target_axis = [0, 0, 1]  # type: List[float]
    target_axis_frame = ""
    tolerance = 0.01  # type: float
    target_inner_product = 0
    @staticmethod
    def type_name():
        """Config-file tag for this constraint type."""
        return "frame_axis_orthogonal"
    @staticmethod
    def is_cost():  # type: () -> bool
        """Hard constraint, not a cost."""
        return False
    def to_dict(self):
        """Serialize all fields into a plain dictionary."""
        return {
            "axis_from_keypoint_name": self.axis_from_keypoint_name,
            "axis_to_keypoint_name": self.axis_to_keypoint_name,
            "target_axis": self.target_axis,
            "target_axis_frame": self.target_axis_frame,
            "tolerance": self.tolerance,
            "target_inner_product": self.target_inner_product,
        }
    def from_dict(self, data_map):
        """Populate every field from a parsed config dictionary."""
        self.axis_from_keypoint_name = data_map["axis_from_keypoint_name"]
        self.axis_to_keypoint_name = data_map["axis_to_keypoint_name"]
        self.target_axis = data_map["target_axis"]
        self.target_axis_frame = data_map["target_axis_frame"]
        self.tolerance = data_map["tolerance"]
        self.target_inner_product = data_map["target_inner_product"]
@attr.s
class FrameAxisParallelConstraintSpec(OptimizationTermSpec):
    """
    Require the axis spanned by two observed keypoints to stay parallel
    to a fixed axis expressed in target_axis_frame.

    NOTE(review): the default target_inner_product is 0, which reads odd
    for a "parallel" constraint (configs set it explicitly to +/-1) —
    confirm before relying on the default.
    """
    axis_from_keypoint_name = ""  # type: str
    axis_to_keypoint_name = ""  # type: str
    # NOTE(review): class-level list default is shared between instances
    # until reassigned — from_dict always replaces it.
    target_axis = [0, 0, 1]  # type: List[float]
    target_axis_frame = ""
    tolerance = 0.1  # type: float
    target_inner_product = 0
    @staticmethod
    def type_name():
        """Config-file tag for this constraint type."""
        return "frame_axis_parallel"
    @staticmethod
    def is_cost():  # type: () -> bool
        """Hard constraint, not a cost."""
        return False
    def to_dict(self):
        """Serialize all fields into a plain dictionary."""
        return {
            "axis_from_keypoint_name": self.axis_from_keypoint_name,
            "axis_to_keypoint_name": self.axis_to_keypoint_name,
            "target_axis": self.target_axis,
            "target_axis_frame": self.target_axis_frame,
            "tolerance": self.tolerance,
            "target_inner_product": self.target_inner_product,
        }
    def from_dict(self, data_map):
        """Populate every field from a parsed config dictionary."""
        self.axis_from_keypoint_name = data_map["axis_from_keypoint_name"]
        self.axis_to_keypoint_name = data_map["axis_to_keypoint_name"]
        self.target_axis = data_map["target_axis"]
        self.target_axis_frame = data_map["target_axis_frame"]
        self.tolerance = data_map["tolerance"]
        self.target_inner_product = data_map["target_inner_product"]
# For the specification of cost terms
@attr.s
class Point2PointCostL2Spec(OptimizationTermSpec):
    """
    Weighted L2 cost pulling one transformed keypoint toward a fixed
    target position.
    """
    keypoint_name = ""  # type: str
    keypoint_idx = -1  # type: int
    # NOTE(review): class-level list default is shared between instances
    # until reassigned — from_dict always replaces it.
    target_position = [0, 0, 0]  # type: List[float]
    penalty_weight = 1.0  # type: float
    @staticmethod
    def type_name():
        """Config-file tag for this cost type."""
        return "point2point_cost"
    @staticmethod
    def is_cost():  # type: () -> bool
        """This term is a cost."""
        return True
    def to_dict(self):
        """Serialize all fields into a plain dictionary."""
        return {
            "keypoint_name": self.keypoint_name,
            "keypoint_idx": self.keypoint_idx,
            "target_position": self.target_position,
            "penalty_weight": self.penalty_weight,
        }
    def from_dict(self, data_map):
        """Populate every field from a parsed config dictionary."""
        self.keypoint_name = data_map["keypoint_name"]
        self.keypoint_idx = data_map["keypoint_idx"]
        self.target_position = data_map["target_position"]
        self.penalty_weight = data_map["penalty_weight"]
# For the specification of point2plane cost
@attr.s
class Point2PlaneCostSpec(OptimizationTermSpec):
    """Cost spec: weighted distance from a tool keypoint to the plane through
    ``target_position`` with normal ``plane_normal``."""

    keypoint_name = ""  # type: str
    keypoint_idx = -1  # type: int
    target_position = [0.0, 0.0, 0.0]  # type: List[float]
    plane_normal = [0.0, 0.0, 1.0]  # type: List[float]
    penalty_weight = 1.0  # type: float

    @staticmethod
    def type_name():  # type: () -> str
        return "point2plane_cost"

    @staticmethod
    def is_cost():  # type: () -> bool
        return True

    def to_dict(self):
        """Serialize this spec into a plain dict."""
        return {
            "keypoint_name": self.keypoint_name,
            "keypoint_idx": self.keypoint_idx,
            "target_position": self.target_position,
            "plane_normal": self.plane_normal,
            "penalty_weight": self.penalty_weight,
        }

    def from_dict(self, data_map):
        """Populate this spec from a dict produced by :meth:`to_dict`."""
        for field in ("keypoint_name", "keypoint_idx", "target_position", "plane_normal", "penalty_weight"):
            setattr(self, field, data_map[field])

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,987 @@
# pylint: skip-file
# flake8: noqa
import json
import time
import carb
import IPython
import numpy as np
import solver.kpam.mp_terms as mp_terms
import solver.kpam.SE3_utils as SE3_utils
# The specification of optimization problem
import solver.kpam.term_spec as term_spec
import yaml
from colored import fg
from omni.isaac.core.utils.prims import (
create_prim,
get_prim_at_path,
is_prim_path_valid,
)
from omni.isaac.core.utils.transformations import (
get_relative_transform,
pose_from_tf_matrix,
tf_matrices_from_poses,
)
from omni.isaac.core.utils.xforms import get_world_pose
from pydrake.all import *
from scipy.spatial.transform import Rotation as R
from solver.kpam.mp_builder import OptimizationBuilderkPAM
from solver.kpam.optimization_problem import OptimizationProblemkPAM, solve_kpam
from solver.kpam.optimization_spec import OptimizationProblemSpecification
# import gensim2.env.solver.planner_utils as plannerutils
from solver.planner_utils import *
# Canned relative end-effector motions, used when a post-actuation step is
# given as a string instead of an explicit [mode, value] pair.
# name -> [translation mode, signed offset in meters]; string motions are
# applied in the world frame via get_pose_from_translation_inworld.
MOTION_DICT = {
    "move-up": ["translate_z", 0.06],
    "move-down": ["translate_z", -0.06],
    "move-left": ["translate_y", 0.08],
    "move-right": ["translate_y", -0.08],
    "move-forward": ["translate_x", 0.1],
    "move-backward": ["translate_x", -0.1],
}
class KPAMPlanner:
    """
    A general class of keypoint-based (kPAM) trajectory-optimization methods to
    solve robotic manipulation tasks, including task-specific pre/post
    actuation motions.
    Adapted from:
    https://github.com/liruiw/Fleet-Tools/blob/master/core/expert/base_expert.py
    Mostly works with simple and kinematic tasks.
    """
def __init__(self, env, robot, object, cfg_path, obj_rot=0, controller=None, draw_points=False, stage=None):
    """
    Args:
        env: simulation environment; provides ``current_time`` and the
            rendering dt used to time-stamp the plan.
        robot: robot wrapper; ``robot.name`` selects the end-effector frame
            name and DOF count.
        object: manipulated-object wrapper (prim paths, keypoints, axes).
        cfg_path: path to a kPAM YAML config, or an already-loaded dict.
        obj_rot: object yaw used to rotate post-actuation x/y translations.
        controller: robot controller (used to pick the active arm for
            dual-arm robots).
        draw_points: if True, draw debug keypoints in the Isaac viewport.
        stage: USD stage, used to query the object's articulation joint prim.
    """
    self.env = env
    self.robot = robot
    self.object = object
    self.cfg_path = cfg_path
    self.controller = controller
    self.obj_rot = obj_rot
    self.draw_points = draw_points
    self.stage = stage
    self.plan_time = 0
    self.goal_joint = None
    self.kpam_success = False
    self.joint_plan_success = False
    # End-effector frame name and DOF count depend on the robot model.
    if "franka" in self.robot.name:
        self.ee_name = "panda_hand"
        self.robot_dof = 9
    elif "split_aloha" in self.robot.name or "lift2" in self.robot.name:
        self.ee_name = "link6"
        self.robot_dof = 8
        # self.ee_name = "panda_hand"
        # self.robot_dof = 9
    else:
        raise NotImplementedError
    # objects initial
    # Named unit axes used to resolve symbolic axis names from the config.
    self.axis_dict = {
        "x": [1.0, 0.0, 0.0],
        "-x": [-1.0, 0.0, 0.0],
        "y": [0.0, 1.0, 0.0],
        "-y": [0.0, -1.0, 0.0],
        "z": [0.0, 0.0, 1.0],
        "-z": [0.0, 0.0, -1.0],
    }
    if self.draw_points:
        from omni.isaac.debug_draw import _debug_draw

        self.draw = _debug_draw.acquire_debug_draw_interface()
    # The config may be passed pre-loaded (dict) or as a YAML file path.
    if isinstance(cfg_path, dict):
        self.cfg = cfg_path
    else:
        self.cfg = yaml.load(open(cfg_path, "r", encoding="utf-8"), Loader=yaml.SafeLoader)
    if "actuation_time" in self.cfg:  # time cost to reach task goal pose
        self.actuation_time = self.cfg["actuation_time"] + self.env.current_time
        self.pre_actuation_times = [t + self.env.current_time for t in self.cfg["pre_actuation_times"]]
        self.post_actuation_times = [t + self.env.current_time for t in self.cfg["post_actuation_times"]]
        # self.pre_actuation_poses_intool = self.cfg["pre_actuation_poses_intool"]
        # self.post_actuation_poses_intool = self.cfg["post_actuation_poses_intool"]
        self.pre_actuation_motions = self.cfg["pre_actuation_motions"]
        self.post_actuation_motions = self.cfg["post_actuation_motions"]
    elif "post_actuation_motions" in self.cfg:
        # No explicit times given: actuation at t=24, waypoints 4 steps apart.
        self.actuation_time = 24
        self.pre_actuation_motions = self.cfg["pre_actuation_motions"]
        self.post_actuation_motions = self.cfg["post_actuation_motions"]
        self.pre_actuation_times = [24 - 4 * i for i in range(len(self.pre_actuation_motions), 0, -1)]
        self.post_actuation_times = [24 + 4 * (i + 1) for i in range(len(self.post_actuation_motions))]
    elif "pre_actuation_motions" in self.cfg:
        self.actuation_time = 24
        self.pre_actuation_motions = self.cfg["pre_actuation_motions"]
        self.post_actuation_motions = []
        self.pre_actuation_times = [24 - 4 * i for i in range(len(self.pre_actuation_motions), 0, -1)]
        self.post_actuation_times = []
    # elif "post_actuation_motions" in self.cfg:
    #     set_trace()
    #     self.actuation_time = 24 + self.env.current_time
    #     self.pre_actuation_motions = self.cfg["pre_actuation_motions"]
    #     self.post_actuation_motions = self.cfg["post_actuation_motions"]
    #     self.pre_actuation_times = [
    #         24 + self.env.current_time - 4 * i
    #         for i in range(len(self.pre_actuation_motions), 0, -1)
    #     ]
    #     self.post_actuation_times = [
    #         24 + self.env.current_time + 4 * (i + 1)
    #         for i in range(len(self.post_actuation_motions))
    #     ]
    elif "pre_actuation_motions" in self.cfg:
        # NOTE(review): unreachable — an identical condition is handled in the
        # branch above, so this block can never execute; kept as-is pending
        # cleanup (confirm which time offset was intended).
        self.actuation_time = 24 + self.env.current_time
        self.pre_actuation_motions = self.cfg["pre_actuation_motions"]
        self.post_actuation_motions = []
        self.pre_actuation_times = [
            24 - 4 * i + self.env.current_time for i in range(len(self.pre_actuation_motions), 0, -1)
        ]
        self.post_actuation_times = []
    if "modify_actuation_motion" in self.cfg:
        self.modify_actuation_motion = self.cfg["modify_actuation_motion"]
    # Drake plant + context used for IK seeding and FK during planning.
    self.plant, self.fk_context = build_plant(
        robot_name=self.robot.name, init_qpos=self.robot.get_joint_positions()
    )
    self.reset_expert()
def setup(self):
# load keypoints
self.solved_ik_times = []
self.joint_traj_waypoints = []
def reset_expert(self):
"""reinitialize expert state"""
self.joint_space_traj = None
self.task_space_traj = None
self.plan_succeeded = False
self.plan_time = 0.0
self.setup()
def check_plan_empty(self):
"""check if already have a plan"""
return self.joint_space_traj is None
def get_pose_from_translation(self, translation, pre_pose):
    """Compose ``pre_pose`` with a translation expressed in its local frame.

    Args:
        translation: length-3 offset in the ``pre_pose`` (ee) frame.
        pre_pose: 4x4 homogeneous transform (robot base -> ee frame).

    Returns:
        4x4 homogeneous transform of the shifted pose.
    """
    offset = np.eye(4)
    offset[:3, 3] = np.asarray(translation)
    # Right-multiplication applies the offset in the pre_pose local frame.
    return pre_pose @ offset
def get_pose_from_translation_inworld(self, translation, pre_pose):
    """Shift ``pre_pose`` by a world-frame translation (rotation untouched)."""
    shifted = pre_pose.copy()
    shifted[:3, 3] = shifted[:3, 3] + np.asarray(translation)
    return shifted
def get_pose_from_rotation(self, rotation, pre_pose):
    """get the pose from rotation

    Rotates ``pre_pose`` about the object's configured rotation axis,
    expressed at the articulation joint frame (conjugation below).
    """
    # Scale the object's unit rotation axis by the requested angle to get an
    # euler vector; assumes `rotation` is in radians (degrees=False below).
    vec = np.array(self.get_object_link0_rot_axis())
    vec *= rotation
    Rot = np.eye(4)
    Rot[:3, :3] = R.from_euler("xyz", vec, degrees=False).as_matrix()
    # Rot = rotAxis(angle=rotation, axis=axis)
    # actuation_pose = (
    #     self.object_pose @ Rot @ se3_inverse(self.object_pose) @ pre_pose
    # )
    world2link_pose = self.get_world2link_pose()  # object to link
    inv_base_pose = se3_inverse(self.base_pose)  # self.base_pose : world to robot base
    # Joint frame expressed in the robot base frame.
    base2link = inv_base_pose.dot(world2link_pose)
    base2link = base2link.reshape(4, 4)
    # Conjugation applies the rotation about the joint frame while keeping the
    # result expressed in the robot base frame.
    actuation_pose = base2link @ Rot @ se3_inverse(base2link) @ pre_pose
    return actuation_pose
def generate_actuation_poses(self):
    # get key pose constraints during trajectory
    """build the post-activation trajectory specified in the config
    (1) reach above the screw
    (2) hammer it by moving downward

    Produces self.pre_actuation_poses / self.post_actuation_poses and the
    matching self.sample_times / self.traj_keyframes lists consumed by the
    trajectory solver.
    """
    self.pre_actuation_poses = []
    self.post_actuation_poses = []
    # Pre-actuation motions are chained starting from the goal pose, then the
    # list is reversed into execution order (see .reverse() below).
    curr_pose = self.task_goal_hand_pose
    for motion in self.pre_actuation_motions:
        mode = motion[0]
        value = motion[1]
        assert mode in ["translate_x", "translate_y", "translate_z", "rotate"]
        assert type(value) == float
        if mode == "rotate":
            curr_pose = self.get_pose_from_rotation(value, curr_pose)
            self.pre_actuation_poses.append(curr_pose)
        else:
            value_vec = [0, 0, 0]
            if mode == "translate_x":
                value_vec[0] = value
            elif mode == "translate_y":
                value_vec[1] = value
            elif mode == "translate_z":
                value_vec[2] = value
            curr_pose = self.get_pose_from_translation(value_vec, curr_pose)
            self.pre_actuation_poses.append(curr_pose)
    self.pre_actuation_poses.reverse()
    curr_pose = self.task_goal_hand_pose  # pose in robot base frame
    for motion in self.post_actuation_motions:
        # Post-actuation motions accept explicit [mode, value] pairs or canned
        # string motions from MOTION_DICT.
        if type(motion) == list:
            mode = motion[0]
            value = motion[1]
            if mode == "rotate":
                curr_pose = self.get_pose_from_rotation(value, curr_pose)
                self.post_actuation_poses.append(curr_pose)
            else:
                value_vec = [0, 0, 0]
                # x/y translations are rotated by the object yaw (obj_rot) so
                # the motion stays aligned with the rotated object.
                if mode == "translate_x":
                    value_vec[0] = value * np.cos(self.obj_rot)
                    value_vec[1] = value * np.sin(self.obj_rot)
                elif mode == "translate_y":
                    value_vec[0] = -value * np.sin(self.obj_rot)
                    value_vec[1] = value * np.cos(self.obj_rot)
                elif mode == "translate_z":
                    value_vec[2] = value
                curr_pose = self.get_pose_from_translation(value_vec, curr_pose)
                self.post_actuation_poses.append(curr_pose)
        elif type(motion) == str:
            mode = MOTION_DICT[motion][0]
            value = MOTION_DICT[motion][1]
            value_vec = [0, 0, 0]
            if mode == "translate_x":
                value_vec[0] = value
            elif mode == "translate_y":
                value_vec[1] = value
            elif mode == "translate_z":
                value_vec[2] = value
            # String motions are applied in the world frame.
            curr_pose = self.get_pose_from_translation_inworld(value_vec, curr_pose)
            self.post_actuation_poses.append(curr_pose)
    # Keyframe times and poses: start -> pre -> actuation -> post.
    self.sample_times = (
        [self.env.current_time] + self.pre_actuation_times + [self.actuation_time] + self.post_actuation_times
    )
    self.traj_keyframes = (
        [self.ee_pose.reshape(4, 4)]
        + self.pre_actuation_poses
        + [self.task_goal_hand_pose]
        + self.post_actuation_poses
    )
    # ========================= visualize keypoints =========================
    # base2world=get_relative_transform(get_prim_at_path(self.robot.base_path), get_prim_at_path(self.robot.root_prim_path))
    # for traj_keyframe in self.traj_keyframes:
    #     draw.draw_points(
    #         [(base2world @ np.append(traj_keyframe[:3,3],1))[:3]],
    #         [(0,0,0,1)], # black
    #         [7]
    #     )
    # ========================= visualize keypoints =========================
# debug start: KPAMPlanner, refactor all self.task.xxx() calls into planner methods
def get_world2link_pose(self):
    """Return the world pose (4x4) of the object's articulation joint frame.

    Reads body0 and the local joint offset (physics:localPos0/localRot0) from
    the USD revolute-joint prim and composes world2body0 @ body02joint.
    """
    revolute_prim = self.stage.GetPrimAtPath(self.object.object_joint_path)
    body0_path = str(revolute_prim.GetRelationship("physics:body0").GetTargets()[0])  # base
    body0_prim = self.stage.GetPrimAtPath(body0_path)  # NOTE(review): unused local
    local_pos = revolute_prim.GetAttribute("physics:localPos0").Get()
    local_rot = revolute_prim.GetAttribute("physics:localRot0").Get()  # wxyz
    local_wxyz = np.array(
        [local_rot.GetReal(), local_rot.GetImaginary()[0], local_rot.GetImaginary()[1], local_rot.GetImaginary()[2]]
    )
    base2joint_pose = np.eye(4)
    base2joint_pose[:3, 3] = np.array([local_pos[0], local_pos[1], local_pos[2]])
    base2joint_pose[:3, :3] = R.from_quat(local_wxyz, scalar_first=True).as_matrix()
    base2joint_pose[:3, 3] = base2joint_pose[:3, 3] * self.object.object_scale[:3]  # PL: we need to scale it first
    world2joint_pose = self.get_transform_matrix_by_prim_path(body0_path) @ base2joint_pose
    return world2joint_pose
def get_tool_keypoints(self):
    """Return the gripper keypoints ("tool_head", "tool_tail", "tool_side")
    in the world frame.

    For dual-arm robots the controller's robot file selects which arm's
    prim paths and keypoints become the active ones on ``self.robot``.
    """
    robot_file = self.controller.robot_file
    if "left_arm" in robot_file:
        arm_prefix = "fl"
    elif "right_arm" in robot_file:
        arm_prefix = "fr"
    else:
        arm_prefix = None
    if arm_prefix is not None:
        self.robot.hand_path = getattr(self.robot, arm_prefix + "_hand_path")
        self.robot.ee_path = getattr(self.robot, arm_prefix + "_ee_path")
        self.robot.gripper_keypoints = getattr(self.robot, arm_prefix + "_gripper_keypoints")
        self.robot.base_path = getattr(self.robot, arm_prefix + "_base_path")
    hand_tf = self.get_transform_matrix_by_prim_path(self.robot.hand_path)
    # Keypoints are stored homogeneous in the hand frame; map to world, drop w.
    return {
        name: (hand_tf @ self.robot.gripper_keypoints[name])[:3]
        for name in ("tool_head", "tool_tail", "tool_side")
    }
def get_transform_matrix_by_prim_path(self, path):
    # return world to prim_path transform matrix
    """Build the 4x4 world pose of the prim at ``path`` from its position and
    wxyz quaternion orientation."""
    position, orientation = get_world_pose(path)
    transform = np.eye(4)
    transform[:3, :3] = R.from_quat(orientation, scalar_first=True).as_matrix()
    transform[:3, 3] = position
    return transform
def get_object_keypoints(self):
    """Return the object's annotated keypoints in the world frame.

    Keypoints annotated on the articulated link are transformed with the link
    pose; keypoints annotated on the base use the base pose. Keypoint values
    are homogeneous coordinates scaled by the object scale before transforming.
    Unknown keypoint names are silently dropped (same as the original
    branch-per-name behavior).

    Returns:
        dict: keypoint name -> world-frame (3,) position.
    """
    link2world_transform_matrix = self.get_transform_matrix_by_prim_path(self.object.object_link_path)
    base2world_transform_matrix = self.get_transform_matrix_by_prim_path(self.object.object_base_path)
    # convert rotation matrix to Euler angles (debug output only)
    link2world_euler_angles = R.from_matrix(link2world_transform_matrix[:3, :3]).as_euler("xyz", degrees=False)
    base2world_euler_angles = R.from_matrix(base2world_transform_matrix[:3, :3]).as_euler("xyz", degrees=False)
    print(f"link2world_euler_angles: {link2world_euler_angles}")
    print(f"base2world_euler_angles: {base2world_euler_angles}")
    # One table instead of five duplicated elif branches: which frame each
    # known keypoint is expressed in.
    frame_by_key = {
        "base_contact_point": base2world_transform_matrix,
        "base_object_contact_point": base2world_transform_matrix,
        "link_contact_point": link2world_transform_matrix,
        "articulated_object_head": link2world_transform_matrix,
        "articulated_object_tail": link2world_transform_matrix,
    }
    self.object_keypoints = self.object.object_keypoints
    object_keypoints = {}
    for key, value in self.object_keypoints.items():
        transform = frame_by_key.get(key)
        if transform is None:
            # preserve original behavior: skip unrecognized keypoint names
            continue
        transformed_keypoint = transform @ (value * self.object.object_scale)
        object_keypoints[key] = transformed_keypoint[:3]
        print(f"{key}: {object_keypoints[key]}")
    return object_keypoints
def get_robot_default_state(self):
    """Return the world pose (4x4) of the robot base prim."""
    return self.get_transform_matrix_by_prim_path(self.robot.base_path)
def get_robot_ee_pose(self):
    """Return the world pose (4x4) of the end-effector prim."""
    return self.get_transform_matrix_by_prim_path(self.robot.ee_path)
def get_robot_tool_pose(self):
    """Return the world pose (4x4) of the gripper/hand prim."""
    return self.get_transform_matrix_by_prim_path(self.robot.hand_path)
def get_object_link_pose(self):
    # return world to object link transform matrix
    """Return the world pose (4x4) of the object's articulated link prim."""
    return self.get_transform_matrix_by_prim_path(self.object.object_link_path)
def get_object_link0_contact_axis(self):
    """Resolve the object's configured contact-axis name to a unit vector."""
    return self.axis_dict[self.object.object_link0_contact_axis]
def get_object_link0_move_axis(self):
    """Resolve the object's configured move-axis name to a unit vector."""
    return self.axis_dict[self.object.object_link0_move_axis]
def get_object_link0_vertical_axis(self):
return np.cross(self.get_object_link0_move_axis(), self.get_object_link0_contact_axis())
def get_object_link0_rot_axis(self):
    """Resolve the object's configured rotation-axis name to a unit vector."""
    return self.axis_dict[self.object.object_link0_rot_axis]
# debug end: KPAMPlanner, refactor all self.task.xxx() calls into planner methods
def get_env_info(self):
    # get current end effector pose, joint angles, object poses, and keypoints from the environment
    """Snapshot the scene state used by the planner: tool/object keypoints,
    robot base/ee/gripper poses, object pose, and planning joint positions —
    all cached on ``self`` in both world and robot-base frames."""
    self.tool_keypoint_in_world = self.get_tool_keypoints()  # robot keypoints in world frame
    # {
    #     'tool_head': np.array([-2.2506687e-01, -2.2929280e-08, 1.8791561e-01], dtype=np.float32),
    #     'tool_tail': np.array([-2.1895210e-01, -1.7026323e-08, 2.7770764e-01], dtype=np.float32),
    #     'tool_side': np.array([-0.22506687, -0.04500002, 0.18791561], dtype=np.float32)
    # }
    self.object_keypoint_in_world = self.get_object_keypoints()  # object keypoints in world frame
    # "articulated_object_head": np.array([-0.01,0.45,0.07], dtype=np.float32)
    if self.draw_points:
        # ========================= visualize keypoints =========================
        list1 = []
        list2 = []
        for key, item in self.tool_keypoint_in_world.items():
            list1.append(item)
        for key, item in self.object_keypoint_in_world.items():
            list2.append(item)
        self.draw.draw_points(
            list1,
            [(1, 0, 0, 1), (0, 1, 0, 1), (0, 0, 1, 1)],  # red head, green tail, blue side -> robot keypoints
            [5] * len(list1),
        )
        self.draw.draw_points(
            list2, [(1.0, 0.0, 1.0, 1)] * len(list2), [5] * len(list2)  # purple -> object keypoints
        )
        # ======================================================================
    self.dt = self.env._rendering_dt  # simulator dt
    self.time = self.env.current_time  # current time
    self.base_pose = self.get_robot_default_state()  # robot base pose in world frame
    # self.base_pose = np.array([[ 1.0000000e+00,  0.0000000e+00,  0.0000000e+00, -6.1500001e-01],
    #                            [ 0.0000000e+00,  1.0000000e+00,  0.0000000e+00, -1.1641532e-10],
    #                            [ 0.0000000e+00,  0.0000000e+00,  1.0000000e+00,  1.4901161e-08],
    #                            [ 0.0000000e+00,  0.0000000e+00,  0.0000000e+00,  1.0000000e+00]],
    #                           dtype=np.float32)
    # Dual-arm robots plan from a hard-coded nominal joint posture instead of
    # the measured joints; franka uses the live joint readings.
    if "split_aloha" in self.robot.name:
        self.joint_positions = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05, -0.05])
    elif "lift2" in self.robot.name:
        self.joint_positions = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04, 0.04])
    elif "franka" in self.robot.name:
        self.joint_positions = self.robot.get_joint_positions()
    else:
        raise NotImplementedError
    # self.joint_positions = np.array([0, -np.pi / 4, 0, -3 * np.pi / 4, 0, np.pi / 2, 0.0, 0.0, 0.0])
    self.curr_tool_keypoints = self.compute_tool_keypoints_inbase()  # robot keypoints in robot base frame
    self.curr_object_keypoints = self.compute_object_keypoints_inbase()  # object keypoints in robot base frame
    self.ee_pose = self.compute_hand_pose_inbase()  # robot base to end effector transform matrix
    self.tool_pose = self.compute_tool_pose_inbase()  # robot base to gripper transform matrix
    self.object_pose = self.compute_object_pose_inbase()  # robot base to object link_0 transform matrix
    self.tool_keypoints_in_hand = self.compute_tool_keypoints_inhand()  # robot keypoints in ee frame
    self.tool_rel_pose = self.compute_tool_inhand()  # robot gripper pose in ee frame
def compute_hand_pose_inbase(self):
    """End-effector pose expressed in the robot base frame (4x4)."""
    base_inv = se3_inverse(self.base_pose)
    return base_inv.dot(self.get_robot_ee_pose()).reshape(4, 4)
def compute_tool_pose_inbase(self):
    """Gripper/hand pose expressed in the robot base frame (4x4)."""
    base_inv = se3_inverse(self.base_pose)
    return base_inv.dot(self.get_robot_tool_pose()).reshape(4, 4)
def compute_object_pose_inbase(self):
    """Object link pose expressed in the robot base frame (4x4)."""
    base_inv = se3_inverse(self.base_pose)
    return base_inv.dot(self.get_object_link_pose()).reshape(4, 4)
def compute_tool_keypoints_inbase(self):
    """Map the world-frame tool keypoints into the robot base frame."""
    base_inv = se3_inverse(self.base_pose)
    return {
        name: base_inv.dot(np.array([loc[0], loc[1], loc[2], 1]))[:3]
        for name, loc in self.tool_keypoint_in_world.items()
    }
def compute_object_keypoints_inbase(self):
    """Map the world-frame object keypoints into the robot base frame."""
    base_inv = se3_inverse(self.base_pose)
    return {
        name: base_inv.dot(np.array([loc[0], loc[1], loc[2], 1]))[:3]
        for name, loc in self.object_keypoint_in_world.items()
    }
def compute_tool_keypoints_inhand(self):
    """Map the base-frame tool keypoints into the end-effector frame."""
    ee_inv = se3_inverse(self.ee_pose)
    return {
        name: ee_inv.dot(np.array([loc[0], loc[1], loc[2], 1]))[:3]
        for name, loc in self.curr_tool_keypoints.items()
    }
def compute_tool_inhand(self):
    """Gripper (tool) pose expressed in the end-effector frame (4x4)."""
    return se3_inverse(self.ee_pose).dot(self.tool_pose)
def add_random_to_keypose(self, transform_matrix, orientation_range, position_range):
    """
    Perturb a pose with uniform random noise applied in its local frame.

    Args:
        transform_matrix (np.ndarray): 4x4 pose in the robot base frame.
        orientation_range (dict): per-axis rotation bounds in degrees, keys
            "x_min"/"x_max"/"y_min"/"y_max"/"z_min"/"z_max".
        position_range (dict): per-axis translation bounds, same key layout.

    Returns:
        np.ndarray: 4x4 pose with the random perturbation applied.
    """
    rotation = transform_matrix[:3, :3]
    translation = transform_matrix[:3, 3]
    # Sample the rotation noise first, then the translation noise, one draw
    # per axis (x, y, z order) — mirrors the original sampling sequence.
    angles = [
        np.random.uniform(orientation_range[axis + "_min"], orientation_range[axis + "_max"])
        for axis in ("x", "y", "z")
    ]
    offsets = [
        np.random.uniform(position_range[axis + "_min"], position_range[axis + "_max"])
        for axis in ("x", "y", "z")
    ]
    noise_rotation = R.from_euler("xyz", angles, degrees=True).as_matrix()
    perturbed = np.eye(4)
    # Right-multiply: the rotation noise acts in the pose's local frame.
    perturbed[:3, :3] = rotation @ noise_rotation
    # Offsets are sampled in the local frame, then rotated into the global one.
    perturbed[:3, 3] = translation + rotation @ np.array(offsets)
    return perturbed
def parse_constraints(self):
"""
Parse constraints from YAML config file into list of constraint dictionaries
Returns:
list: List of constraint dictionaries
"""
constraint_dicts = []
# read constraint_list from configuration
for constraint in self.cfg["constraint_list"]:
# directly copy all fields without predefining a schema
constraint_dict = {k: v for k, v in constraint.items()}
if "target_axis" in constraint_dict and constraint_dict["target_axis"] == "link0_contact_axis":
vec = self.get_object_link0_contact_axis()
constraint_dict["target_axis"] = vec
elif "target_axis" in constraint_dict and constraint_dict["target_axis"] == "object_link0_move_axis":
vec = self.get_object_link0_move_axis()
constraint_dict["target_axis"] = vec
elif "target_axis" in constraint_dict and constraint_dict["target_axis"] == "object_link0_contact_axis":
vec = self.get_object_link0_contact_axis()
constraint_dict["target_axis"] = vec
elif "target_axis" in constraint_dict and constraint_dict["target_axis"] == "object_link0_vertical_axis":
vec = self.get_object_link0_vertical_axis()
constraint_dict["target_axis"] = vec
constraint_dicts.append(constraint_dict)
return constraint_dicts
# tool use related
def solve_actuation_joint(self, generate_traj=True):
    # get target pose
    """solve the formulated kpam problem and get goal joint

    Runs keypoint-constrained IK from multiple seeds, keeps the solution
    closest to the current joints as ``self.goal_joint``, and forward-
    kinematics it into ``self.task_goal_hand_pose`` (with optional random
    keypose perturbation from the config). Sets ``self.kpam_success``.
    """
    # optimization_spec = OptimizationProblemSpecification()
    # optimization_spec = self.create_opt_problem(optimization_spec)
    # constraint_dicts = [c.to_dict() for c in optimization_spec._constraint_list]
    # constraint_dicts example:
    # [{'keypoint_name': 'tool_tail',
    # 'target_keypoint_name': 'articulated_object_head',
    # 'tolerance': 0.0001},
    # {'axis_from_keypoint_name': 'tool_head',
    # 'axis_to_keypoint_name': 'tool_side',
    # 'target_axis': [0.0, -1.0, 2.9802322387695312e-08],
    # 'target_axis_frame': 'object',
    # 'target_inner_product': 1,
    # 'tolerance': 0.01},
    # {'axis_from_keypoint_name': 'tool_head',
    # 'axis_to_keypoint_name': 'tool_tail',
    # 'target_axis': [0.0, -1.0, 2.9802322387695312e-08],
    # 'target_axis_frame': 'object',
    # 'target_inner_product': 0,
    # 'tolerance': 0.01}]
    constraint_dicts = self.parse_constraints()
    # need to parse the kpam config file and create a kpam problem
    indexes = np.random.randint(len(anchor_seeds), size=(8,))
    # array([12, 10,  9, 11,  4,  6,  6,  6])
    # IK seeds: current configuration plus 8 randomly chosen anchor postures.
    random_seeds = [self.joint_positions.copy()[: self.robot_dof]] + [
        anchor_seeds[idx][: self.robot_dof] for idx in indexes
    ]
    solutions = []
    for seed in random_seeds:
        res = solve_ik_kpam(
            get_relative_transform(
                get_prim_at_path(self.object.object_link_path), get_prim_at_path(self.robot.base_path)
            ),
            constraint_dicts,
            self.plant.GetFrameByName(self.ee_name),
            self.tool_keypoints_in_hand,
            self.curr_object_keypoints,
            RigidTransform(self.ee_pose.reshape(4, 4)),
            seed.reshape(-1, 1),
            self.joint_positions.copy()[:9],
            rot_tol=0.01,
            timeout=True,
            consider_collision=False,
            contact_plane_normal=self.object.contact_plane_normal,
        )
        if res is not None:
            solutions.append(res.get_x_val()[:9])
    # solutions example:
    # [array([ 1.14329376e-06,  3.53816124e-01,  4.80939611e-06, -2.05026707e+00,
    #         -1.92385797e-05,  2.65837018e+00,  7.85377454e-01,  4.00000000e-02,
    #          4.00000000e-02]),
    #  array([-1.774606  , -1.61710153,  1.6115427 , -2.16312007,  2.27760554,
    #          2.11851842, -0.19566194,  0.04      ,  0.04      ]),
    #  array([-8.44218317e-08,  3.53816313e-01, -1.85909422e-07, -2.05026625e+00,
    #          7.54750962e-07,  2.65836829e+00,  7.85399185e-01,  4.00000000e-02,
    #          4.00000000e-02]),
    #  array([-1.774606  , -1.61710153,  1.6115427 , -2.16312007,  2.27760554,
    #          2.11851842, -0.19566194,  0.04      ,  0.04      ]),
    #  array([-1.774606  , -1.61710153,  1.6115427 , -2.16312007,  2.27760554,
    #          2.11851842, -0.19566194,  0.04      ,  0.04      ])]
    if len(solutions) == 0:
        # raise ValueError("empty solution in kpam, target pose is unavailable")
        # No seed converged: fall back to the current joints and flag failure.
        self.goal_joint = self.joint_positions[:9].copy()
        self.kpam_success = False
    else:
        self.kpam_success = True
        solutions = np.array(solutions)
        joint_positions = self.joint_positions[:9]
        dist_to_init_joints = np.linalg.norm(solutions - joint_positions.copy(), axis=-1)
        res = solutions[np.argmin(dist_to_init_joints)]
        self.goal_joint = res
        # select the best solution from solutions as res
        # res example:
        # array([-8.44218317e-08,  3.53816313e-01, -1.85909422e-07, -2.05026625e+00,
        #         7.54750962e-07,  2.65836829e+00,  7.85399185e-01,  4.00000000e-02,
        #         4.00000000e-02])
        # FK the chosen joints into the goal hand pose.
        self.plant.SetPositions(self.fk_context, res)
        # self.task_goal_hand_pose = self.differential_ik.ForwardKinematics(diff_ik_context)
        self.task_goal_hand_pose = self.plant.EvalBodyPoseInWorld(
            self.fk_context, self.plant.GetBodyByName(self.ee_name)
        )
        # self.task_goal_hand_pose example:
        # RigidTransform(
        #     R=RotationMatrix([
        #         [0.9678432216687978, -1.8063898958705824e-06, 0.25155416567113187],
        #         [-1.9244272431764666e-06, -0.9999999999981233, 2.2322813388666276e-07],
        #         [0.2515541656702566, -7.001475258030282e-07, -0.9678432216704577],
        #     ]),
        #     p=[0.6182456460224572, -1.8402926150504245e-07, 0.29068015466097613],
        # )
        self.task_goal_hand_pose = np.array(self.task_goal_hand_pose.GetAsMatrix4())
        ### add random to keypose ###
        orientation_range = self.cfg["keypose_random_range"]["orientation"]
        position_range = self.cfg["keypose_random_range"]["position"]
        self.task_goal_hand_pose = self.add_random_to_keypose(
            self.task_goal_hand_pose, orientation_range, position_range
        )
        ### add random to keypose ###
        # self.task_goal_hand_pose example;
        # array([[ 9.67843222e-01, -1.80638990e-06,  2.51554166e-01,
        #          6.18245646e-01],
        #        [-1.92442724e-06, -1.00000000e+00,  2.23228134e-07,
        #         -1.84029262e-07],
        #        [ 2.51554166e-01, -7.00147526e-07, -9.67843222e-01,
        #          2.90680155e-01],
        #        [ 0.00000000e+00,  0.00000000e+00,  0.00000000e+00,
        #          1.00000000e+00]])
        self.task_goal_tool_pose = self.task_goal_hand_pose @ self.tool_rel_pose
        # # Transform the keypoint
        # self.curr_solution_tool_keypoint_head = SE3_utils.transform_point(
        #     self.task_goal_hand_pose, tool_keypoint_loc_inhand[0, :]
        # )
        # self.curr_solution_tool_keypoint_tail = SE3_utils.transform_point(
        #     self.task_goal_hand_pose, tool_keypoint_loc_inhand[1, :]
        # )
        # self.curr_solution_tool_keypoint_side = SE3_utils.transform_point(
        #     self.task_goal_hand_pose, tool_keypoint_loc_inhand[2, :]
        # )
        self.plan_time = self.env.current_time
        # self.goal_keypoint = np.stack(
        #     (
        #         self.curr_solution_tool_keypoint_head,
        #         self.curr_solution_tool_keypoint_tail,
        #         self.curr_solution_tool_keypoint_side,
        #     ),
        #     axis=0,
        # )
def get_task_traj_from_joint_traj(self):
    """forward kinematics the joint trajectory to get the task trajectory

    Samples the solved joint-space trajectory at densified times, FKs each
    sample to an ee pose, and stores a linearly-interpolated PiecewisePose
    in ``self.task_space_traj``.
    """
    self.pose_traj = []
    # Densify the keyframe times so the task-space trajectory is smooth.
    ik_times = dense_sample_traj_times(self.sample_times, self.actuation_time)
    # print(ik_times)
    self.dense_ik_times = ik_times
    for traj_time in ik_times:
        # diff_ik_context = self.differential_ik.GetMyMutableContextFromRoot(self.context)
        set_joints = self.joint_space_traj.value(traj_time)
        # print(set_joints)
        self.plant.SetPositions(self.fk_context, set_joints)
        pose = self.plant.EvalBodyPoseInWorld(self.fk_context, self.plant.GetBodyByName(self.ee_name))
        self.pose_traj.append(pose.GetAsMatrix4())
    self.task_space_traj = PiecewisePose.MakeLinear(ik_times, [RigidTransform(p) for p in self.pose_traj])
def modify_actuation_joint(self):
curr_pose = self.task_goal_hand_pose
motion = self.modify_actuation_motion
mode = motion[0]
value = motion[1]
assert mode in ["translate_x", "translate_y", "translate_z", "rotate"]
assert type(value) == float
if mode == "rotate":
curr_pose = self.get_pose_from_rotation(value, curr_pose)
self.pre_actuation_poses.append(curr_pose)
else:
value_vec = [0, 0, 0]
if mode == "translate_x":
value_vec[0] = value
elif mode == "translate_y":
value_vec[1] = value
elif mode == "translate_z":
value_vec[2] = value
curr_pose = self.get_pose_from_translation(value_vec, curr_pose)
self.task_goal_hand_pose = curr_pose # update
return
def solve_postactuation_traj(self):
    """Build the pre/post actuation keyframe poses and their sample times by
    delegating to :meth:`generate_actuation_poses`."""
    self.generate_actuation_poses()
def save_traj(self, traj):
    """Dump a trajectory to ``traj.txt`` as comma-separated rows."""
    np.savetxt("traj.txt", np.asarray(traj), delimiter=",")
def solve_joint_traj(self, densify=True):
    """Solve a smooth joint-space trajectory through the keyframe constraints.

    Builds a first-order-hold interpolation between the current joints and the
    actuation goal joints as an initial guess, then runs a trajectory
    optimization (``solve_ik_traj_with_standoff``) constrained by the keyframe
    poses.  On success the result is stored as a shape-preserving cubic
    joint-space trajectory and, if ``densify``, the matching task-space
    trajectory is rebuilt.

    :param densify: densify the waypoint times before optimizing and rebuild
        the task-space trajectory from the solved joint trajectory
    :raises ValueError: if the trajectory optimization fails.  The failure
        flags (``self.joint_plan_success`` / ``self.env.need_termination``)
        are set *before* raising -- previously those assignments were placed
        after the raise and were unreachable.
    """
    keyposes = self.traj_keyframes
    keytimes = self.sample_times
    self.joint_traj_waypoints = [self.joint_positions.copy()]
    # Directly interpolate between the current joints and the goal joints as
    # the initial guess.  (Interpolating current -> goal -> current was tried
    # before, but the solved trajectory could be poor and the goal joints may
    # collide with the object.)
    self.joint_space_traj = PiecewisePolynomial.FirstOrderHold(
        [self.env.current_time, self.actuation_time],
        np.array([self.joint_positions.copy(), np.array(self.goal_joint, dtype=np.float32)]).T,
    )
    print("self.env.current_time: ", self.env.current_time, " self.actuation_time: ", self.actuation_time)
    print("self.joint_space_traj: ", self.joint_space_traj)
    if densify:
        self.dense_traj_times = dense_sample_traj_times(self.sample_times, self.actuation_time)
    else:
        self.dense_traj_times = self.sample_times
    print("solve traj endpoint")
    # Optimize the interpolated joint waypoints subject to the keyframe poses.
    res = solve_ik_traj_with_standoff(
        [self.ee_pose.reshape(4, 4), self.task_goal_hand_pose],
        np.array([self.joint_positions.copy(), self.goal_joint]).T,
        q_traj=self.joint_space_traj,
        waypoint_times=self.dense_traj_times,
        keyposes=keyposes,
        keytimes=keytimes,
    )
    if res is not None:
        print("endpoint trajectory solved!")
        # Use the joint trajectory to build the task trajectory for the robot.
        self.joint_plan_success = True
        self.joint_traj_waypoints = list(res.get_x_val().reshape(-1, 9))
        self.joint_space_traj = PiecewisePolynomial.CubicShapePreserving(
            self.dense_traj_times, np.array(self.joint_traj_waypoints).T
        )
        if densify:
            self.get_task_traj_from_joint_traj()
    else:
        # Record the failure state before raising; these statements were
        # previously dead code placed after the raise.
        self.joint_plan_success = False
        self.env.need_termination = True
        raise ValueError("endpoint trajectory not solved!")
def get_joint_action(self):
    """Return the next joint-space action, or reset the env when no plan exists."""
    if not self.check_plan_empty():
        # Evaluate the joint trajectory one control step into the future (lookahead).
        lookahead_time = self.env.current_time + self.env.env_dt
        return self.joint_space_traj.value(lookahead_time).reshape(-1)
    print("no joint trajectory")
    return self.env.reset()
def get_pose_action(self, traj_eff_pose):
    """Convert a 4x4 end-effector transform into a 7-D action.

    Layout: 3-D position + 3-D euler rotation + 1-D gripper channel, where
    the gripper channel is hard-coded to 1.
    """
    pose_part = pack_pose(traj_eff_pose, rot_type="euler")
    gripper_part = [1]
    return np.concatenate([pose_part, gripper_part])
def get_actuation_joint(self):
    """Return the actuation joint target, solving for it first if no plan exists.

    Raises:
        ValueError: when no joint is cached and a plan already exists.
    """
    if self.goal_joint is None:
        if not self.check_plan_empty():
            raise ValueError("no actuation joint")
        self.solve_actuation_joint()
    return self.goal_joint
def get_keypose(self):
    """Solve (on demand) and return the keyframe poses and their sample times.

    Refreshes the environment info; when no plan exists yet, solves the
    actuation joint and, on kpam success, the post-actuation keyframes.  On
    kpam failure the keyframes and sample times are cleared.

    :return: (traj_keyframes, sample_times)
    """
    self.get_env_info()
    if self.check_plan_empty():
        # (An unused ``s = time.time()`` timing stub was removed here.)
        self.solve_actuation_joint()
        if self.kpam_success:
            if "modify_actuation_motion" in self.cfg:
                # For e.g. the rotate-knob task: allow modifying the goal hand pose.
                self.modify_actuation_joint()
            self.solve_postactuation_traj()
        else:
            self.traj_keyframes = []
            self.sample_times = []
    return self.traj_keyframes, self.sample_times
def get_action(self, mode="pose"):
    """Get the task-space action from the kpam expert trajectory.

    Plans on demand: when no plan exists yet, solves the actuation joint, the
    post-actuation keyframes and the joint trajectory, then evaluates the
    resulting task-space trajectory one step ahead.  (The previous version had
    this summary as a stray no-op string expression in the middle of the body.)

    :param mode: kept for interface compatibility; only a pose action is produced
    :return: 7-D action (xyz + euler + gripper), see ``get_pose_action``
    """
    self.get_env_info()
    if self.check_plan_empty():
        plan_start = time.time()
        self.solve_actuation_joint()
        self.solve_postactuation_traj()
        self.solve_joint_traj()
        print("env time: {:.3f} plan generation time: {:.3f}".format(self.env.current_time, time.time() - plan_start))
    # Look one control step ahead on the task-space trajectory.
    # NOTE(review): uses self.time + self.dt while other methods use
    # self.env.current_time + self.env.env_dt -- confirm these agree.
    traj_eff_pose = self.task_space_traj.value(self.time + self.dt)
    return self.get_pose_action(traj_eff_pose)  # transform matrix to action (3+3+1)
def get_actuation_qpos(self):
    """Solve and return the actuation joint configuration.

    :return: (goal_joint, kpam_success) -- the solved joint target and whether
        the kpam solve succeeded
    """
    self.get_env_info()
    self.solve_actuation_joint()
    return self.goal_joint, self.kpam_success
def get_sparse_traj_qpos(self):
    """Solve the plan without densification and return the sparse joint waypoints.

    :return: (joint_traj_waypoints, joint_plan_success)
    """
    self.get_env_info()
    self.solve_actuation_joint()
    self.solve_postactuation_traj()
    # densify=False keeps only the keyframe sample times as waypoints.
    self.solve_joint_traj(densify=False)
    return self.joint_traj_waypoints, self.joint_plan_success
if __name__ == "__main__":
    # Smoke test: boot Isaac Sim, load the CloseMicrowave task, and query the
    # kpam planner for its keyframe poses.
    from isaacsim import SimulationApp

    # SimulationApp must be created before any omni.isaac imports are resolved.
    simulation_app = SimulationApp({"headless": False})
    import numpy as np

    # from omni.isaac.franka.controllers.pick_place_controller import PickPlaceController
    from gensim_testing_v2.tasks.close_microwave import CloseMicrowave
    from omni.isaac.core import World
    from omni.isaac.core.utils.types import ArticulationAction

    # from solver.planner import KPAMPlanner
    my_world = World(stage_units_in_meters=1.0)
    my_task = CloseMicrowave()
    my_world.add_task(my_task)
    my_world.reset()
    task_params = my_task.get_params()
    my_franka = my_world.scene.get_object(task_params["robot_name"]["value"])
    # NOTE(review): KPAMPlanner is presumably defined earlier in this file
    # (its import above is commented out) -- confirm.
    expert_planner = KPAMPlanner(
        my_world,
        cfg_path="gensim_testing_v2/solver/kpam/config/CloseMicrowave.yaml",
    )
    expert_planner.get_keypose()
    # print("action:",action)
    # Expected action layout: 3d pose + 3d euler + 1d gripper openness, e.g.
    # action: [ 0.38964379 0.00477079 0.45819783 3.09401237 -0.79489916 0.07860673 1. ]

# --- end of planner file; the following is a separate kpam solver utility module ---
# (web diff-viewer residue removed here)
# pylint: skip-file
# flake8: noqa
import json
import math

import cv2
import IPython
import numpy as np
import open3d as o3d
from colored import fg
from pydrake.all import *
from transforms3d.axangles import *
from transforms3d.euler import *
from transforms3d.quaternions import *
# Module-level Drake plant; populated by build_plant() and shared by the IK
# helpers (solve_ik_kpam, solve_ik_traj_with_standoff).
robot_plant = None

# Skeleton keypoints of the Franka gripper expressed in the hand frame.
# NOTE(review): presumably consumed by make_gripper_pts for visualization --
# confirm with callers.
franka_gripper_points = np.array(
    [
        [0.0, 0.0, 0.0],
        [0.0, 0.0, 0.0],
        [0.053, -0.0, 0.075],
        [-0.053, 0.0, 0.075],
        [0.053, -0.0, 0.105],
        [-0.053, 0.0, 0.105],
    ]
)
# Diverse 9-DoF joint configurations (7 arm joints + 2 fingers).
# NOTE(review): presumably used to warm-start / re-seed IK solves -- confirm
# with callers; they are not referenced in this chunk.
anchor_seeds = np.array(
    [
        [0.0, -1.285, 0, -2.356, 0.0, 1.571, 0.785, 0, 0],
        [2.5, 0.23, -2.89, -1.69, 0.056, 1.46, -1.27, 0, 0],
        [2.8, 0.23, -2.89, -1.69, 0.056, 1.46, -1.27, 0, 0],
        [2, 0.23, -2.89, -1.69, 0.056, 1.46, -1.27, 0, 0],
        [2.5, 0.83, -2.89, -1.69, 0.056, 1.46, -1.27, 0, 0],
        [0.049, 1.22, -1.87, -0.67, 2.12, 0.99, -0.85, 0, 0],
        [-2.28, -0.43, 2.47, -1.35, 0.62, 2.28, -0.27, 0, 0],
        [-2.02, -1.29, 2.20, -0.83, 0.22, 1.18, 0.74, 0, 0],
        [-2.2, 0.03, -2.89, -1.69, 0.056, 1.46, -1.27, 0, 0],
        [-2.5, -0.71, -2.73, -0.82, -0.7, 0.62, -0.56, 0, 0],
        [-2, -0.71, -2.73, -0.82, -0.7, 0.62, -0.56, 0, 0],
        [-2.66, -0.55, 2.06, -1.77, 0.96, 1.77, -1.35, 0, 0],
        [1.51, -1.48, -1.12, -1.55, -1.57, 1.15, 0.24, 0, 0],
        [-2.61, -0.98, 2.26, -0.85, 0.61, 1.64, 0.23, 0, 0],
    ]
)
def add_table_collision_free_constraint(ik, plant, frame, bb_size=(0.12, 0.08, 0.08), table_height=0.1):
    """Keep a link above the table by constraining a box of points around ``frame``.

    Approximates the link with an axis-aligned box of half-extents ``bb_size``
    (expressed in ``frame``) and requires the frame origin plus the six box
    face centers to stay above ``table_height`` (with a -0.01 margin) and
    within a generous y workspace.

    Fixes: the default ``bb_size`` was a mutable list (shared across calls);
    the six face-center constraints were copy-pasted and are now a loop.

    :param ik: pydrake InverseKinematics program to add constraints to
    :param plant: the MultibodyPlant the frame belongs to
    :param frame: frame whose surroundings must stay above the table
    :param bb_size: half-extents of the bounding box along the frame's x/y/z
    :param table_height: table surface height in the world frame
    """
    min_height = -0.01 + table_height
    max_num = 100
    y_bound = 1
    # The frame origin itself must additionally stay in front of the robot (x >= 0).
    ik.AddPositionConstraint(
        frame,
        [0, 0, 0],
        plant.world_frame(),
        [0, -y_bound, min_height],
        [max_num, y_bound, max_num],
    )
    # Face centers of the bounding box may reach slightly behind the base (x >= -0.3).
    face_offsets = [
        [0, 0, -bb_size[2]],
        [0, 0, bb_size[2]],
        [0, -bb_size[1], 0],
        [0, bb_size[1], 0],
        [bb_size[0], 0, 0],
        [-bb_size[0], 0, 0],
    ]
    for offset in face_offsets:
        ik.AddPositionConstraint(
            frame,
            offset,
            plant.world_frame(),
            [-0.3, -y_bound, min_height],
            [max_num, y_bound, max_num],
        )
def make_gripper_pts(points, color=(1, 0, 0)):
    """Build an Open3D LineSet sketching the gripper from its keypoints.

    Point 1 is replaced with the midpoint of points 2 and 3 before wiring up
    the edges; every edge is drawn in ``color``.
    """
    edges = [[0, 1], [1, 2], [1, 3], [3, 5], [2, 4]]
    pts = points.copy()
    pts[1] = 0.5 * (pts[2] + pts[3])
    gripper = o3d.geometry.LineSet()
    gripper.points = o3d.utility.Vector3dVector(pts)
    gripper.lines = o3d.utility.Vector2iVector(edges)
    gripper.colors = o3d.utility.Vector3dVector([color] * len(edges))
    return gripper
def get_interp_time(curr_time, finish_time, ratio):
    """Linearly interpolate between ``curr_time`` and ``finish_time`` at ``ratio``."""
    return curr_time + ratio * (finish_time - curr_time)
def se3_inverse(RT):
    """Invert a 4x4 rigid transform analytically: inv([R t]) = [R^T, -R^T t].

    :param RT: array reshapeable to (4, 4)
    :return: (4, 4) float32 inverse transform
    """
    RT = RT.reshape(4, 4)
    rot_t = RT[:3, :3].T
    inv = np.eye(4, dtype=np.float32)
    inv[:3, :3] = rot_t
    inv[:3, 3] = -rot_t.dot(RT[:3, 3])
    return inv
def rotAxis(axis, angle):
    """4x4 homogeneous rotation about a named axis ("x"/"y"/"z") or an arbitrary 3D axis."""
    if axis == "x":
        return rotX(angle)
    if axis == "y":
        return rotY(angle)
    if axis == "z":
        return rotZ(angle)
    # Arbitrary axis: embed the 3x3 axis-angle rotation into a homogeneous matrix.
    transform = np.eye(4)
    transform[:3, :3] = axangle2mat(axis, angle)
    return transform
def rotZ(rotz):
    """Homogeneous 4x4 rotation of ``rotz`` radians about the z-axis."""
    c, s = np.cos(rotz), np.sin(rotz)
    return np.array(
        [
            [c, -s, 0, 0],
            [s, c, 0, 0],
            [0, 0, 1, 0],
            [0, 0, 0, 1],
        ]
    )
def rotY(roty):
    """Homogeneous 4x4 rotation of ``roty`` radians about the y-axis."""
    c, s = np.cos(roty), np.sin(roty)
    return np.array(
        [
            [c, 0, s, 0],
            [0, 1, 0, 0],
            [-s, 0, c, 0],
            [0, 0, 0, 1],
        ]
    )
def rotX(rotx):
    """Homogeneous 4x4 rotation of ``rotx`` radians about the x-axis."""
    c, s = np.cos(rotx), np.sin(rotx)
    return np.array(
        [
            [1, 0, 0, 0],
            [0, c, -s, 0],
            [0, s, c, 0],
            [0, 0, 0, 1],
        ]
    )
def mat2axangle_(mat):
    """Convert a rotation matrix into a compact axis*angle 3-vector."""
    axis, angle = mat2axangle(mat[:3, :3])
    return axis * angle
def pack_pose(pose, rot_type="quat"):  # for action
    """Pack a 4x4 homogeneous transform into a flat action vector.

    Layout is ``[x, y, z, rot...]`` where the rotation part is a quaternion
    (4 values), euler angles (3) or axis*angle (3) depending on ``rot_type``.

    :param pose: 4x4 homogeneous transform
    :param rot_type: "quat", "euler" or "axangle"
    :return: 1-D numpy array of length 7 ("quat") or 6 (otherwise)
    :raises ValueError: for an unknown ``rot_type`` (previously this crashed
        with an UnboundLocalError on ``rot_func``)
    """
    if rot_type == "quat":
        rot_func = mat2quat
    elif rot_type == "euler":
        rot_func = mat2euler
    elif rot_type == "axangle":
        rot_func = mat2axangle_
    else:
        raise ValueError("unknown rot_type: {}".format(rot_type))
    rot_index = 4 if rot_type == "quat" else 3
    packed = np.zeros(3 + rot_index)
    packed[:3] = pose[:3, 3]
    packed[3 : 3 + rot_index] = rot_func(pose[:3, :3])
    return packed
def dense_sample_traj_times(sample_times, task_completion_time, start_idx=1):
    """Densify the waypoint times for the IK trajectory.

    Adds ~1 interpolated time per second between consecutive sample times
    (~2 per second once past ``task_completion_time``), then returns the
    sorted, de-duplicated union with the original sample times.
    """
    dense = list(sample_times)
    for idx in range(start_idx, len(sample_times)):
        t0, t1 = sample_times[idx - 1], sample_times[idx]
        # Sample twice as densely once past the task completion time.
        per_second = 1 if t1 < task_completion_time else 2
        count = int((t1 - t0) * per_second)
        dense.extend(t0 + (k / count) * (t1 - t0) for k in range(count))
    # Force the original sample times back in, then sort and de-duplicate.
    dense.extend(sample_times)
    return sorted(np.unique(dense))
def compose_circular_key_frames(
    center_pose,
    start_time=0,
    r=0.02,
    discrete_frame_num=5,
    total_degrees=np.pi * 2,
    total_time=3,
):
    """Generate keyframes tracing a circle of radius ``r`` in the xy-plane (e.g. whisking).

    Frame i is ``center_pose`` translated by the circle point at angle
    ``i / discrete_frame_num * total_degrees``; timestamps are evenly spaced
    over ``total_time`` starting just after ``start_time``.

    :return: (times, poses)
    """
    times = [start_time + (i + 1) / discrete_frame_num * total_time for i in range(discrete_frame_num)]
    poses = []
    for i in range(discrete_frame_num):
        angle = i / discrete_frame_num * total_degrees
        keyframe = center_pose.copy()
        # xy offset of the circle point (z-rotation of [r, 0, 0]).
        keyframe[0, 3] += np.cos(angle) * r
        keyframe[1, 3] += np.sin(angle) * r
        poses.append(keyframe)
    return times, poses
def compose_rotate_y_key_frames(
    center_pose,
    start_time=0,
    r=0.02,
    discrete_frame_num=5,
    total_degrees=np.pi * 2,
    total_time=3,
):
    """Generate circular-motion keyframes (e.g. for whisking).

    NOTE(review): this body is byte-identical to compose_circular_key_frames
    and rotates about z (via rotZ), not y as the name suggests -- likely a
    copy-paste placeholder; confirm the intended behavior before relying on it.

    :return: (times, poses) -- evenly spaced timestamps after ``start_time``
        and copies of ``center_pose`` offset along the circle of radius ``r``.
    """
    poses = []
    times = [start_time + (i + 1) / discrete_frame_num * total_time for i in range(discrete_frame_num)]
    for i in range(discrete_frame_num):
        pose = center_pose.copy()
        # xy offset of the circle point: z-rotation of [r, 0, 0], keeping x/y only.
        pose[:2, 3] += (rotZ(i / discrete_frame_num * total_degrees)[:3, :3] @ np.array([r, 0, 0]))[:2]
        poses.append(pose)
    return times, poses
def compose_rotating_key_frames(
    ef_pose,
    center_point,
    start_time=0,
    discrete_frame_num=5,
    total_degrees=np.pi * 2,
    total_time=3,
):
    """Generate keyframes revolving the end effector clockwise about ``center_point``.

    The end-effector pose is expressed relative to the pivot, rotated about z
    by increasing negative angles, then shifted back -- so the tool keeps
    facing the pivot (e.g. turning a wrench).

    :return: (times, poses)
    """
    times = [start_time + (i + 1) / discrete_frame_num * total_time for i in range(discrete_frame_num)]
    pivot_rel = ef_pose.copy()
    pivot_rel[:3, 3] -= center_point
    poses = []
    for i in range(discrete_frame_num):
        # Negative angle => clockwise revolution.
        angle = -(i + 1) / discrete_frame_num * total_degrees
        rotated = rotZ(angle) @ pivot_rel
        rotated[:3, 3] += center_point
        poses.append(rotated)
    return times, poses
def solve_ik_kpam(
    link2base,  # object-link-to-world transform matrix (4x4)
    constraint_dicts,
    gripper_frame,
    keypoints_robot_in_hand,  # robot keypoints expressed in the end-effector frame
    keypoints_object_in_robot,  # object keypoints expressed in the robot base frame
    p0,
    q0,
    centering_joint,
    rot_tol=np.pi / 10,
    add_table_col=True,
    add_gripper_faceup=False,
    timeout=False,
    consider_collision=False,
    table_height=0.15,
    contact_plane_normal=None,
):
    """Solve the kpam IK problem on the module-level ``robot_plant``.

    Each entry of ``constraint_dicts`` is translated into a drake IK
    constraint: the *parallel*/*orthogonal* constraints become
    angle-between-vectors constraints and the keypoint-contact constraint
    becomes a position constraint.  On top of these, the program minimizes a
    quadratic joint-centering cost around ``centering_joint`` (base joint
    weighted 10x) plus position/orientation costs pulling the gripper toward
    the seed pose ``p0``.

    Bug fix: in the "hand_parallel_to_link0_edge_door" branch ``tol`` was
    never read from the constraint, so it either raised a NameError (branch
    hit first) or silently reused a stale tolerance from a previous
    constraint.

    :param p0: RigidTransform used as the pose-space cost reference
    :param q0: joint-space initial guess
    :param rot_tol, add_table_col, timeout, table_height, contact_plane_normal:
        kept for interface compatibility; currently unused in this body
    :return: the solver result on success, otherwise None
    """
    ik_context = robot_plant.CreateDefaultContext()
    ik = InverseKinematics(robot_plant, ik_context)

    def get_target_axis(link2base, target_axis):
        # Rotate an axis expressed in the object link frame into the world frame.
        vec = link2base[:3, :3] @ target_axis
        return vec

    for constraint in constraint_dicts:
        name = constraint.get("name")
        if name == "fingers_orthogonal_to_link0":
            from_name = constraint["axis_from_keypoint_name"]
            to_name = constraint["axis_to_keypoint_name"]
            tool_vec = keypoints_robot_in_hand[to_name] - keypoints_robot_in_hand[from_name]
            target1_from_name = constraint["cross_target_axis1_from_keypoint_name"]
            target1_to_name = constraint["cross_target_axis1_to_keypoint_name"]
            target_vec1 = keypoints_object_in_robot[target1_to_name] - keypoints_object_in_robot[target1_from_name]
            target_vec2 = get_target_axis(link2base, constraint["target_axis"])
            # Cross product: the direction perpendicular to both target axes.
            target_vec = np.cross(target_vec1, target_vec2)
            tol = constraint["tolerance"]
            tgt = np.arccos(constraint["target_inner_product"])
            ik.AddAngleBetweenVectorsConstraint(
                gripper_frame,
                tool_vec / np.linalg.norm(tool_vec),
                robot_plant.world_frame(),
                target_vec / np.linalg.norm(target_vec),
                max(tgt - tol, 0),
                min(tgt + tol, np.pi),
            )
        elif name == "hand_parallel_to_link0_edge_door":
            from_name = constraint["axis_from_keypoint_name"]
            to_name = constraint["axis_to_keypoint_name"]
            tool_vec = keypoints_robot_in_hand[to_name] - keypoints_robot_in_hand[from_name]
            target1_from_name = constraint["cross_target_axis1_from_keypoint_name"]
            target1_to_name = constraint["cross_target_axis1_to_keypoint_name"]
            target_vec = keypoints_object_in_robot[target1_to_name] - keypoints_object_in_robot[target1_from_name]
            # BUG FIX: this branch previously never assigned ``tol``.
            tol = constraint["tolerance"]
            tgt = np.arccos(constraint["target_inner_product"])
            ik.AddAngleBetweenVectorsConstraint(
                gripper_frame,
                tool_vec / np.linalg.norm(tool_vec),
                robot_plant.world_frame(),
                target_vec / np.linalg.norm(target_vec),
                max(tgt - tol, 0),
                min(tgt + tol, np.pi),
            )
        elif name == "hand_parallel_to_link0_edge":
            from_name = constraint["axis_from_keypoint_name"]
            to_name = constraint["axis_to_keypoint_name"]
            target_axis = get_target_axis(link2base, constraint["target_axis"])
            vec = keypoints_robot_in_hand[to_name] - keypoints_robot_in_hand[from_name]
            tol = constraint["tolerance"]
            tgt = np.arccos(constraint["target_inner_product"])
            ik.AddAngleBetweenVectorsConstraint(
                gripper_frame,
                vec / np.linalg.norm(vec),
                robot_plant.world_frame(),
                target_axis / np.linalg.norm(target_axis),
                max(tgt - tol, 0),
                min(tgt + tol, np.pi),
            )
        elif name == "hand_parallel_to_axis_computed_by_keypoints":
            from_name = constraint["axis_from_keypoint_name"]
            to_name = constraint["axis_to_keypoint_name"]
            vec = keypoints_robot_in_hand[to_name] - keypoints_robot_in_hand[from_name]
            target_axis = (
                keypoints_object_in_robot[constraint["target_axis_to_keypoint_name"]]
                - keypoints_object_in_robot[constraint["target_axis_from_keypoint_name"]]
            )
            tol = constraint["tolerance"]
            tgt = np.arccos(constraint["target_inner_product"])
            ik.AddAngleBetweenVectorsConstraint(
                gripper_frame,
                vec / np.linalg.norm(vec),
                robot_plant.world_frame(),
                target_axis / np.linalg.norm(target_axis),
                max(tgt - tol, 0),
                min(tgt + tol, np.pi),
            )
        elif name == "hand_parallel_to_link0_move_axis":
            from_name = constraint["axis_from_keypoint_name"]
            to_name = constraint["axis_to_keypoint_name"]
            target_axis = get_target_axis(link2base, constraint["target_axis"])
            vec = keypoints_robot_in_hand[to_name] - keypoints_robot_in_hand[from_name]
            tol = constraint["tolerance"]
            tgt = np.arccos(constraint["target_inner_product"])
            ik.AddAngleBetweenVectorsConstraint(
                gripper_frame,
                vec / np.linalg.norm(vec),
                robot_plant.world_frame(),
                target_axis / np.linalg.norm(target_axis),
                max(tgt - tol, 0),
                min(tgt + tol, np.pi),
            )
        elif name == "fingers_contact_with_link0":
            # Match a tool keypoint to an object keypoint within a box tolerance.
            tool_point = keypoints_robot_in_hand[constraint["keypoint_name"]]
            object_point = keypoints_object_in_robot[constraint["target_keypoint_name"]]
            tol = constraint["tolerance"]
            ik.AddPositionConstraint(
                gripper_frame,
                tool_point,
                robot_plant.world_frame(),
                object_point - tol,
                object_point + tol,
            )
        else:
            raise ValueError("undefined constraint")

    # Keep the arm from reaching backward: gripper origin stays at x >= 0.05.
    ik.AddPositionConstraint(gripper_frame, [0, 0, 0], robot_plant.world_frame(), [0.05, -1, 0], [1, 1, 1])
    if add_gripper_faceup:
        # Keep the gripper x-axis at least 15 degrees away from straight down.
        ik.AddAngleBetweenVectorsConstraint(
            gripper_frame,
            [1, 0, 0],
            robot_plant.world_frame(),
            [0, 0, -1],
            np.pi / 12,
            np.pi,
        )
    if consider_collision:
        ik.AddMinimumDistanceConstraint(0.01)

    prog = ik.get_mutable_prog()
    q = ik.q()
    solver = SnoptSolver()
    # Joint-space centering cost; the base joint deviation is weighted 10x.
    joint_cost_mat = np.identity(len(q))
    joint_cost_mat[0, 0] = 10
    prog.AddQuadraticErrorCost(joint_cost_mat, centering_joint, q)
    # Pose-space costs pulling the gripper toward the seed pose p0.
    ik.AddPositionCost(gripper_frame, [0, 0, 0], robot_plant.world_frame(), p0.translation(), np.eye(3))
    ik.AddOrientationCost(gripper_frame, RotationMatrix(), robot_plant.world_frame(), p0.rotation(), 1)
    prog.SetInitialGuess(q, q0)
    result = solver.Solve(ik.prog())
    if result.is_success():
        return result
    else:
        return None
def solve_ik_traj_with_standoff(
    endpoint_pose,
    endpoint_joints,  # 9D joint vector at endpoints
    q_traj,  # joint trajectory used as the initial guess
    waypoint_times,  # time stamps after interpolation densification
    keytimes,  # keyframe time stamps
    keyposes,  # keyframe poses as transform matrices
    rot_tol=0.03,
):
    """Run a small trajopt over joint waypoints seeded from an end-effector trajectory.

    Decision variables are 9-DoF joint vectors at every waypoint time.  Key
    poses (except the first, which is the current pose) become hard
    position/orientation constraints on the gripper frame, the first waypoint
    is pinned to ``endpoint_joints[:, 0]``, and a quadratic smoothness cost
    penalizes joint-space differences (first and last joints weighted 10x).

    Fixes: the solver fallback used a bare ``except:`` which also swallowed
    SystemExit/KeyboardInterrupt; the loop variable ``time`` shadowed the
    ``time`` module.

    :param endpoint_pose: kept for interface compatibility (unused here)
    :return: the solver result on success, otherwise None
    """
    waypoint_num = len(waypoint_times)
    prog = MathematicalProgram()
    q = prog.NewContinuousVariables(9, waypoint_num)
    gripper_frame = robot_plant.GetFrameByName("panda_hand")
    plant_contexts = [robot_plant.CreateDefaultContext() for _ in range(waypoint_num)]
    q0 = np.array([q_traj.value(t) for t in waypoint_times])

    # Add the key poses (standoffs etc.) as constraints at their timestamps.
    for idx, t in enumerate(waypoint_times):
        if t not in keytimes:
            continue
        keypoint_idx = keytimes.index(t)
        if keypoint_idx == 0:
            # The first keyframe is the current pose; leave it unconstrained here.
            continue
        pose = RigidTransform(keyposes[keypoint_idx])
        prog.AddConstraint(
            OrientationConstraint(
                robot_plant,
                gripper_frame,
                RotationMatrix(),
                robot_plant.world_frame(),
                pose.rotation(),
                rot_tol,
                plant_contexts[idx],
            ),
            q[:, idx],
        )
        prog.AddConstraint(
            PositionConstraint(
                robot_plant,
                robot_plant.world_frame(),
                pose.translation(),
                pose.translation(),
                gripper_frame,
                [0, 0, 0],
                plant_contexts[idx],
            ),
            q[:, idx],
        )

    # Pin the first waypoint to the current joint configuration.
    prog.AddConstraint(np.sum((q[:, 0] - endpoint_joints[:, 0]) ** 2) == 0)

    # Smoothness cost on consecutive joint differences; extra weight on the
    # first (base) and last joints.
    weight = np.ones((9, 1))
    weight[0] = 10.0
    weight[-1] = 10.0
    prog.AddQuadraticCost(np.sum(weight * (q[:, 1:] - q[:, :-1]) ** 2))
    prog.SetInitialGuess(q, q0.squeeze().T)

    # Prefer SNOPT; fall back to IPOPT on solve failure or solver error.
    try:
        solver = SnoptSolver()
        result = solver.Solve(prog)
        if not result.is_success():
            print("SnoptSolver failed, use IpoptSolver")
            solver = IpoptSolver()
            result = solver.Solve(prog)
    except Exception:
        print("SnoptSolver error, use IpoptSolver")
        solver = IpoptSolver()
        result = solver.Solve(prog)
    if result.is_success():
        return result
    print("solve_ik_traj_with_standoff return None")
    return None
def build_plant(time_step=0.004, discrete_contact_solver="sap", robot_name=None, init_qpos=None):
    """Build the module-level Drake plant used by the IK helpers.

    Sets the global ``robot_plant`` and returns it together with a plant
    context suitable for forward kinematics.

    :param time_step: plant discrete time step in seconds
    :param discrete_contact_solver: drake contact solver name
    :param robot_name: selects the robot model; must contain "split_aloha",
        "lift2" or "franka"
    :param init_qpos: initial joint configuration; only forwarded for the
        franka variant (the others use their built-in defaults)
    :return: (robot_plant, fk_plant_context)
    :raises ValueError: if robot_name is None (previously this crashed with a
        TypeError on ``in None``)
    :raises NotImplementedError: for an unrecognized robot_name
    """
    global robot_plant
    if robot_name is None:
        raise ValueError("robot_name must be provided")
    multibody_plant_config = MultibodyPlantConfig(
        time_step=time_step,
        discrete_contact_solver=discrete_contact_solver,
    )
    builder = DiagramBuilder()
    robot_plant, _ = AddMultibodyPlant(multibody_plant_config, builder)
    if "split_aloha" in robot_name:
        # NOTE(review): init_qpos is intentionally not forwarded here -- confirm.
        AddPiper(robot_plant, init_qpos=None)
    elif "lift2" in robot_name:
        AddR5a(robot_plant, init_qpos=None)
    elif "franka" in robot_name:
        AddFranka(robot_plant, init_qpos=init_qpos)
    else:
        raise NotImplementedError
    robot_plant.Finalize()
    diagram = builder.Build()
    fk_context = diagram.CreateDefaultContext()
    fk_plant_context = robot_plant.GetMyMutableContextFromRoot(fk_context)
    return robot_plant, fk_plant_context
def AddR5a(plant, init_qpos):
    """Load the R5a arm URDF, weld its base to the world, and set default joint angles.

    :param plant: MultibodyPlant to add the model to
    :param init_qpos: default angles for the revolute joints in order; a
        built-in home configuration is used when None
    :return: the model instance added to the plant
    """
    urdf_path = "workflows/simbox/panda_drake/r5a/R5a.urdf"
    model = Parser(plant).AddModelFromFile(urdf_path)
    plant.WeldFrames(plant.world_frame(), plant.GetFrameByName("base_link"))
    if init_qpos is None:
        init_qpos = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04, 0.04]
    # Assign defaults to revolute joints only, consuming init_qpos in order.
    next_angle = 0
    for joint_index in plant.GetJointIndices(model):
        joint = plant.get_mutable_joint(joint_index)
        if isinstance(joint, RevoluteJoint):
            joint.set_default_angle(init_qpos[next_angle])
            next_angle += 1
    return model
def AddPiper(plant, init_qpos):
    """Load the Piper arm URDF, weld its base to the world, and set default joint angles.

    :param plant: MultibodyPlant to add the model to
    :param init_qpos: default angles for the revolute joints in order; a
        built-in home configuration is used when None
    :return: the model instance added to the plant
    """
    urdf_path = "workflows/simbox/panda_drake/piper100/piper100.urdf"
    model = Parser(plant).AddModelFromFile(urdf_path)
    plant.WeldFrames(plant.world_frame(), plant.GetFrameByName("arm_base"))
    if init_qpos is None:
        init_qpos = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05, -0.05]
    # Assign defaults to revolute joints only, consuming init_qpos in order.
    next_angle = 0
    for joint_index in plant.GetJointIndices(model):
        joint = plant.get_mutable_joint(joint_index)
        if isinstance(joint, RevoluteJoint):
            joint.set_default_angle(init_qpos[next_angle])
            next_angle += 1
    return model
def AddFranka(plant, init_qpos):
    """Load the Panda arm+hand URDF, weld its base link, and set default joint angles.

    Fix: this function uses ``math.pi`` but the module never imported ``math``
    (none of the star imports above provide it); ``import math`` is added to
    the module's import block.

    :param plant: MultibodyPlant to add the model to
    :param init_qpos: default angles for the revolute joints in order; the
        standard Panda home configuration is used when None
    :return: the model instance added to the plant
    """
    franka_combined_path = "workflows/simbox/panda_drake/panda_arm_hand.urdf"
    parser = Parser(plant)
    franka = parser.AddModelFromFile(franka_combined_path)
    plant.WeldFrames(plant.world_frame(), plant.GetFrameByName("panda_link0"))
    if init_qpos is None:
        # Panda home pose (7 arm joints) padded with zeros for the fingers.
        init_qpos = [
            0,
            -math.pi / 4,
            0,
            -3 * math.pi / 4,
            0,
            math.pi / 2,
            0.0,
            0.0,
            0.0,
        ]
    # Assign defaults to revolute joints only, consuming init_qpos in order.
    index = 0
    for joint_index in plant.GetJointIndices(franka):
        joint = plant.get_mutable_joint(joint_index)
        if isinstance(joint, RevoluteJoint):
            joint.set_default_angle(init_qpos[index])
            index += 1
    return franka