diff --git a/brett2/PR2.py b/brett2/PR2.py
index 18ebf79..4d22355 100644
--- a/brett2/PR2.py
+++ b/brett2/PR2.py
@@ -114,21 +114,21 @@ def __init__(self, rave_only=False):
self.torso = Torso(self)
self.base = Base(self)
- # make the joint limits match the PR2 soft limits
- low_limits, high_limits = self.robot.GetDOFLimits()
- rarm_low_limits = [-2.1353981634, -0.3536, -3.75, -2.1213, None, -2.0, None]
- rarm_high_limits = [0.564601836603, 1.2963, 0.65, -0.15, None, -0.1, None]
- for rarm_index, low, high in zip(self.robot.GetManipulator("rightarm").GetArmIndices(), rarm_low_limits, rarm_high_limits):
- if low is not None and high is not None:
- low_limits[rarm_index] = low
- high_limits[rarm_index] = high
- larm_low_limits = [-0.564601836603, -0.3536, -0.65, -2.1213, None, -2.0, None]
- larm_high_limits = [2.1353981634, 1.2963, 3.75, -0.15, None, -0.1, None]
- for larm_index, low, high in zip(self.robot.GetManipulator("leftarm").GetArmIndices(), larm_low_limits, larm_high_limits):
- if low is not None and high is not None:
- low_limits[larm_index] = low
- high_limits[larm_index] = high
- self.robot.SetDOFLimits(low_limits, high_limits)
+ ## make the joint limits match the PR2 soft limits
+ #low_limits, high_limits = self.robot.GetDOFLimits()
+ #rarm_low_limits = [-2.1353981634, -0.3536, -3.75, -2.1213, None, -2.0, None]
+ #rarm_high_limits = [0.564601836603, 1.2963, 0.65, -0.15, None, -0.1, None]
+ #for rarm_index, low, high in zip(self.robot.GetManipulator("rightarm").GetArmIndices(), rarm_low_limits, rarm_high_limits):
+ #if low is not None and high is not None:
+ #low_limits[rarm_index] = low
+ #high_limits[rarm_index] = high
+ #larm_low_limits = [-0.564601836603, -0.3536, -0.65, -2.1213, None, -2.0, None]
+ #larm_high_limits = [2.1353981634, 1.2963, 3.75, -0.15, None, -0.1, None]
+ #for larm_index, low, high in zip(self.robot.GetManipulator("leftarm").GetArmIndices(), larm_low_limits, larm_high_limits):
+ #if low is not None and high is not None:
+ #low_limits[larm_index] = low
+ #high_limits[larm_index] = high
+ #self.robot.SetDOFLimits(low_limits, high_limits)
rospy.on_shutdown(self.stop_all)
diff --git a/brett2/mtf_trajectories.py b/brett2/mtf_trajectories.py
new file mode 100644
index 0000000..da04a1e
--- /dev/null
+++ b/brett2/mtf_trajectories.py
@@ -0,0 +1,140 @@
+#import roslib;
+#roslib.load_manifest('rospy')
+import rospy
+import numpy as np
+from brett2 import PR2
+import jds_utils.conversions as conv
+import jds_utils.math_utils as mu
+from kinematics import retiming
+import scipy.interpolate as si
+
+BodyState = np.dtype([
+ ("r_arm", float, 7),
+ ("l_arm", float, 7),
+ ("r_gripper", float, 1),
+ ("l_gripper", float, 1),
+ ("head", float, 2),
+ ("base", float, 3)
+])
+
+
+def make_joint_traj(xyzs, quats, manip, ref_frame, targ_frame, filter_options = 0):
+ "do ik and then fill in the points where ik failed"
+
+ n = len(xyzs)
+ assert len(quats) == n
+
+ robot = manip.GetRobot()
+ joint_inds = manip.GetArmJoints()
+ robot.SetActiveDOFs(joint_inds)
+ orig_joint = robot.GetActiveDOFValues()
+
+ joints = []
+ inds = []
+
+ for i in xrange(0,n):
+ mat4 = conv.trans_rot_to_hmat(xyzs[i], quats[i])
+ joint = PR2.cart_to_joint(manip, mat4, ref_frame, targ_frame, filter_options)
+ if joint is not None:
+ joints.append(joint)
+ inds.append(i)
+ robot.SetActiveDOFValues(joint)
+
+
+ robot.SetActiveDOFValues(orig_joint)
+
+
+ rospy.loginfo("found ik soln for %i of %i points",len(inds), n)
+ if len(inds) > 2:
+ joints2 = mu.interp2d(np.arange(n), inds, joints)
+ return joints2, inds
+ else:
+        return np.zeros((n, len(joint_inds))), []
+
+
+def flatten_compound_dtype(compound_array):
+ arrays = []
+ for desc in compound_array.dtype.descr:
+ field = desc[0]
+ arr = compound_array[field]
+ if arr.ndim == 1:
+ float_arr = arr[:,None].astype('float')
+ elif arr.ndim == 2:
+ float_arr = arr.astype('float')
+ else:
+ raise Exception("subarray with field %s must be 1d or 2d"%field)
+ arrays.append(float_arr)
+
+ return np.concatenate(arrays, axis=1)
+
+
+def follow_body_traj(pr2, traj, times=None, r_arm = False, l_arm = False, r_gripper = False, l_gripper = False, head = False, base = False):
+
+ assert isinstance(pr2, PR2.PR2)
+ pr2.update_rave()
+
+ # originally 0.2
+ vel_mult = 0.4
+
+ s = 0.3
+ vel_l = []
+ vel_r = []
+
+ vel_limits = np.r_[pr2.rarm.vel_limits*vel_mult, pr2.larm.vel_limits*vel_mult, pr2.rgrip.vel_limits, pr2.lgrip.vel_limits]
+ acc_limits = np.r_[pr2.rarm.acc_limits, pr2.larm.acc_limits, np.inf, np.inf]
+
+ times, inds = retiming.retime(flatten_compound_dtype(traj), vel_limits, acc_limits)
+
+ traj = traj[inds]
+ smooth_traj = traj
+
+ n = len(traj['r_arm'])
+
+ (r_tck, _) = si.splprep(traj['r_arm'].T, s=s, u=times, k=3)
+ smooth_traj['r_arm'] = np.r_[si.splev(times, r_tck, der=0)].T
+ vel_r = np.r_[si.splev(times, r_tck, der=1)].T
+
+ (l_tck, _) = si.splprep(traj['l_arm'].T, s=s, u=times, k=3)
+ smooth_traj['l_arm'] = np.r_[si.splev(times, l_tck, der=0)].T
+ vel_l = np.r_[si.splev(times, l_tck, der=1)].T
+
+ # make sure gripper closes all the way
+ for i in range(n):
+ if smooth_traj['r_gripper'][i] < 0.005:
+ smooth_traj['r_gripper'][i] = -0.01
+ if smooth_traj['l_gripper'][i] < 0.005:
+ smooth_traj['l_gripper'][i] = -0.01
+
+ if r_arm:
+ pr2.rarm.goto_joint_positions(smooth_traj['r_arm'][0])
+ if l_arm:
+ pr2.larm.goto_joint_positions(smooth_traj['l_arm'][0])
+ if r_gripper:
+ pr2.rgrip.set_angle(smooth_traj['r_gripper'][0])
+ if l_gripper:
+ pr2.lgrip.set_angle(smooth_traj['l_gripper'][0])
+ if head or base:
+ raise NotImplementedError
+
+ pr2.join_all()
+
+ rospy.sleep(5.0)
+
+ if r_gripper:
+ pr2.rgrip.follow_timed_trajectory(times, smooth_traj['r_gripper'])
+ if l_gripper:
+ pr2.lgrip.follow_timed_trajectory(times, smooth_traj['l_gripper'])
+
+ rospy.sleep(0.5)
+
+ if r_arm:
+ pr2.rarm.follow_timed_joint_trajectory(smooth_traj['r_arm'], vel_r, times)
+ if l_arm:
+ pr2.larm.follow_timed_joint_trajectory(smooth_traj['l_arm'], vel_l, times)
+ if head:
+ pr2.head.follow_timed_trajectory(times, traj['head'])
+ if base:
+ pr2.base.follow_timed_trajectory(times, traj['base'], frame_id = '/map')
+
+ pr2.join_all()
+
diff --git a/brett2/trajectories.py b/brett2/trajectories.py
index 13c2e1f..cf4a267 100644
--- a/brett2/trajectories.py
+++ b/brett2/trajectories.py
@@ -52,7 +52,7 @@ def make_joint_traj(xyzs, quats, manip, ref_frame, targ_frame, filter_options =
-def follow_body_traj2(pr2, bodypart2traj, times=None, wait=True, base_frame = "/base_footprint"):
+def follow_body_traj2(pr2, bodypart2traj, times=None, wait=True, base_frame = "/base_footprint", speed_factor=1):
rospy.loginfo("following trajectory with bodyparts %s", " ".join(bodypart2traj.keys()))
trajectories = []
@@ -79,8 +79,8 @@ def follow_body_traj2(pr2, bodypart2traj, times=None, wait=True, base_frame = "/
trajectories = np.concatenate(trajectories, 1)
- vel_limits = np.array(vel_limits)
- acc_limits = np.array(acc_limits)
+ vel_limits = np.array(vel_limits)*speed_factor
+
times = retiming.retime_with_vel_limits(trajectories, vel_limits)
times_up = np.arange(0,times[-1]+1e-4,.1)
diff --git a/experiments/discrete_matching.py b/experiments/discrete_matching.py
new file mode 100644
index 0000000..ffa8f95
--- /dev/null
+++ b/experiments/discrete_matching.py
@@ -0,0 +1,32 @@
+import numpy as np
+import scipy.spatial.distance as ssd
+from trajoptpy.kin_utils import shortest_paths
+def match(xfeat_nk, xpos_nd, yfeat_mk, ypos_md, feature_metric = "euclidean", feature_coeff = 1, direction_metric = "cosine"):
+    """
+    M > N
+    return list of length N
+    """
+    N,K = xfeat_nk.shape
+    D = xpos_nd.shape[1]
+    M = yfeat_mk.shape[0]
+
+    assert xfeat_nk.shape == (N,K)
+    assert xpos_nd.shape == (N,D)
+    assert yfeat_mk.shape == (M,K)
+    assert ypos_md.shape == (M,D)
+
+    nodecosts_nm = ssd.cdist(xfeat_nk, yfeat_mk, metric = feature_metric) * feature_coeff
+    edgecosts_nmm = np.empty((N-1,M,M))
+    for n in xrange(N-1):
+        dir_d = xpos_nd[n+1] - xpos_nd[n]
+        dir_mmd = ypos_md[:,None,:] - ypos_md[None,:,:]
+        dir_mm_d = dir_mmd.reshape(M*M,D)
+        dists_mm_1 = ssd.cdist(dir_mm_d, dir_d[None,:], metric = direction_metric)
+        edgecosts_nmm[n] = dists_mm_1[:,0].reshape(M,M)
+
+ paths_mn, costs_m = shortest_paths(nodecosts_nm, edgecosts_nmm)
+ best_path = paths_mn[np.argmin(costs_m)]
+ return best_path
+
+
+
\ No newline at end of file
diff --git a/iros/ArmPlannerPR2.py b/iros/ArmPlannerPR2.py
new file mode 100644
index 0000000..2e1f7c1
--- /dev/null
+++ b/iros/ArmPlannerPR2.py
@@ -0,0 +1,87 @@
+from IKPlannerFunctions import IKInterpolationPlanner
+from brett2.PR2 import PR2, Arm, IKFail
+
+class PlannerArm(Arm):
+ """
+ Planner class for the Arm.
+ """
+ def __init__ (self, pr2, rl):
+ Arm.__init__(self, pr2, rl)
+ self.planner = IKInterpolationPlanner(self, self.lr)
+
+ def goInDirection (self, d, dist, steps=10):
+ """
+ Moves the tool tip in the specified direction in the gripper frame.
+
+ Direction of movement -> d
+ f -> forward (along the tip of the gripper)
+ b -> backward
+ u -> up
+ d -> down
+ l -> left
+ r -> right
+ Distance traveled by tool tip -> dist
+ Number of points of linear interpolation -> steps
+ """
+ self.pr2.update_rave()
+ trajectory = self.planner.goInDirection(d, dist, steps)
+
+ if trajectory:
+ self.follow_joint_trajectory (trajectory)
+ else: raise IKFail
+
+
+ def goInWorldDirection (self, d, dist, steps=10):
+ """
+ Moves the tool tip in the specified direction in the base_link frame.
+
+ Direction of movement -> d
+ f -> forward
+ b -> backward
+ u -> up
+ d -> down
+ l -> left
+ r -> right
+ Distance traveled by tool tip -> dist
+ Number of points of linear interpolation -> steps
+ """
+ self.pr2.update_rave()
+ trajectory = self.planner.goInWorldDirection(d, dist, steps)
+
+ if trajectory:
+ self.follow_joint_trajectory (trajectory)
+ else: raise IKFail
+
+
+ def circleAroundRadius (self, d, rad, finAng, steps=10):
+ """
+ Moves the gripper in a circle.
+
+ Direction of circle (either inner or outer) -> d
+ Radius of circle -> rad
+ Final angle covered by circle -> finAng
+ Number of points of linear interpolation of angle -> steps
+ """
+ self.pr2.update_rave()
+ trajectory = self.planner.circleAroundRadius (d, rad, finAng)
+
+ if trajectory:
+ self.follow_joint_trajectory (trajectory)
+ else: raise IKFail
+
+
+
+class PlannerPR2 (PR2):
+ """
+ Planner class for PR2 with planning arms.
+ """
+ def __init__ (self):
+ PR2.__init__(self)
+ self.rarm = PlannerArm(self,'r')
+ self.larm = PlannerArm(self, 'l')
+
+ def gotoArmPosture (self, pos):
+ self.larm.goto_posture(pos)
+ self.rarm.goto_posture(pos)
+ self.update_rave()
+
diff --git a/iros/IKPlannerFunctions.py b/iros/IKPlannerFunctions.py
new file mode 100644
index 0000000..6a0c2a5
--- /dev/null
+++ b/iros/IKPlannerFunctions.py
@@ -0,0 +1,229 @@
+from brett2.PR2 import Arm
+import kinematics.kinematics_utils as ku
+
+import rospy
+
+import numpy as np
+from numpy.linalg import norm
+
+
+class IKInterpolationPlanner(object):
+ """
+ Class which plans based on OpenRAVE's IK solver
+ """
+ def __init__(self, _arm, _rl, _filter_options=0):
+ self.rl = _rl
+ #If useful
+ self.rl_long = {'l':'left', 'r':'right'}[self.rl]
+ self.arm = _arm
+ self.filter_options = _filter_options
+
+
+ def setFilterOptions (self, filter_options):
+ """
+ Filter options indicate basic options while planning:
+
+ IKFO_CheckEnvCollisions = 1
+ IKFO_IgnoreSelfCollisions = 2
+ IKFO_IgnoreJointLimits = 4
+ IKFO_IgnoreCustomFilters = 8
+ IKFO_IgnoreEndEffectorCollisions = 16
+ """
+ self.filter_options = filter_options
+
+
+
+ def plan(self, transforms):
+ """
+ Basic plan function which just solves the IK for each transform given.
+
+ List of transforms along path -> transforms
+ """
+ if len(transforms) < 1:
+            rospy.logerr('Not enough transforms!')
+
+ if len(transforms) == 1:
+ firstTransform = self.arm.manip.GetEndEffectorTransform()
+ transforms = [firstTransform, transforms[0]]
+
+ trajectory = []
+
+ for transform in transforms:
+ joints = self.arm.manip.FindIKSolution(transform, self.filter_options)
+
+ if joints is None:
+ rospy.logwarn('IK Failed on ' + self.rl_long + 'arm.')
+ return joints
+
+ trajectory.append(joints)
+
+ return trajectory
+
+
+ #Assuming all the Joint DOFs are numpy arrays
+ def smoothPlan(self, transforms):
+ """
+ Smooth plan function which solves for all IK solutions for each transform given.
+ It then chooses the joint DOF values which are closest to current DOF values.
+
+ List of transforms along path -> transforms
+ """
+ if len(transforms) < 1:
+            rospy.logerr('Not enough transforms!')
+
+ if len(transforms) == 1:
+ initTransform = self.arm.manip.GetEndEffectorTransform()
+ transforms = [initTransform, transforms[0]]
+
+ trajectory = []
+
+ #armIndices = self.arm.manip.GetJointIndices()
+ #currJoints = self.pr2.robot.GetDOFValues(armIndices)
+ currJoints = self.arm.get_joint_positions()
+
+ for transform in transforms:
+ allJoints = self.arm.manip.FindIKSolutions(transform, self.filter_options)
+
+ if not allJoints.size:
+ rospy.logwarn('IK Failed on ' + self.rl_long + 'arm.')
+ return None
+
+ allJoints = [ku.closer_joint_angles(joints, currJoints) for joints in allJoints]
+ normDifferences = [norm(currJoints-joints,2) for joints in allJoints]
+ minIndex = normDifferences.index(min(normDifferences))
+
+ trajectory.append(allJoints[minIndex])
+
+ return trajectory
+
+
+ #Not sure about scale here. Treating everything as meters.
+ #I don't think I need to worry about scale here anyway
+ def goInDirection (self, d, dist, steps=10):
+ """
+ Moves the tool tip in the specified direction in the gripper frame.
+
+ Direction of movement -> d
+ f -> forward (along the tip of the gripper)
+ b -> backward
+ u -> up
+ d -> down
+ l -> left
+ r -> right
+ Distance traveled by tool tip -> dist
+ Number of points of linear interpolation -> steps
+ """
+ initTransform = self.arm.manip.GetEndEffectorTransform()
+ initOrigin = initTransform[0:3,3]
+
+ if d == 'f':
+ dirVec = initTransform[0:3,2]
+ elif d == 'b':
+ dirVec = -1*initTransform[0:3,2]
+ elif d == 'u':
+ dirVec = initTransform[0:3,1]
+ elif d == 'd':
+ dirVec = -1*initTransform[0:3,1]
+ elif d == 'l':
+ dirVec = initTransform[0:3,0]
+ elif d == 'r':
+ dirVec = -1*initTransform[0:3,0]
+ else:
+            rospy.logerr("Invalid direction: " + d)
+
+ endOffset = dirVec*float(dist)
+
+ transforms = []
+
+ for step in range(steps+1):
+ currVec = initOrigin + float(step)/steps*endOffset
+
+ newTfm = initTransform.copy()
+ newTfm[0:3,3] = np.unwrap(currVec)
+
+ transforms.append(newTfm)
+
+ return self.smoothPlan(transforms)
+
+
+
+ #Not sure about scale here. Treating everything as meters.
+ #I don't think I need to worry about scale here anyway
+ def goInWorldDirection (self, d, dist, steps=10):
+ """
+ Moves the tool tip in the specified direction in the base_link frame.
+
+ Direction of movement -> d
+ f -> forward
+ b -> backward
+ u -> up
+ d -> down
+ l -> left
+ r -> right
+ Distance traveled by tool tip -> dist
+ Number of points of linear interpolation -> steps
+ """
+ initTransform = self.arm.manip.GetEndEffectorTransform()
+ initOrigin = initTransform[0:3,3]
+ baseTransform = self.arm.pr2.robot.GetLink('base_link').GetTransform()
+
+ if d == 'u':
+ dirVec = baseTransform[0:3,2]
+ elif d == 'd':
+ dirVec = -1*baseTransform[0:3,2]
+ elif d == 'l':
+ dirVec = baseTransform[0:3,1]
+ elif d == 'r':
+ dirVec = -1*baseTransform[0:3,1]
+ elif d == 'f':
+ dirVec = baseTransform[0:3,0]
+ elif d == 'b':
+ dirVec = -1*baseTransform[0:3,0]
+ else:
+            rospy.logerr("Invalid direction: " + d)
+
+ endOffset = dirVec*float(dist)
+
+ transforms = []
+
+ for step in range(steps+1):
+ currVec = initOrigin + endOffset*(float(step)/steps)
+
+ newTfm = initTransform.copy()
+ newTfm[0:3,3] = np.unwrap(currVec)
+
+ transforms.append(newTfm)
+
+ return self.smoothPlan(transforms)
+
+
+ def circleAroundRadius (self, d, rad, finAng, steps=10):
+ """
+ Moves the gripper in a circle.
+
+ Direction of circle (either inner or outer) -> d
+ Radius of circle -> rad
+ Final angle covered by circle -> finAng
+ Number of points of linear interpolation of angle -> steps
+ """
+ WorldFromEETfm = self.arm.manip.GetEndEffectorTransform()
+
+ initTfm = np.eye(4)
+ initOrigin = d*rad*np.array([0,1,0])
+ initTfm[0:3,3] = np.unwrap(initOrigin)
+ transforms = []
+
+ for step in range(steps+1):
+ ang = float(finAng)*step/steps
+
+ rotMat = np.eye(4)
+ rotMat[0,0] = np.cos(d*ang)
+ rotMat[0,1] = -np.sin(d*ang)
+ rotMat[1,0] = np.sin(d*ang)
+ rotMat[1,1] = np.cos(d*ang)
+ rotMat[0:3,3] = np.unwrap(-1*initOrigin)
+ print rotMat
+
+ transforms.append(WorldFromEETfm.dot(rotMat.dot(initTfm)))
+
+ return self.smoothPlan(transforms)
diff --git a/iros/TrajectoryPlayback.py b/iros/TrajectoryPlayback.py
new file mode 100644
index 0000000..95c148c
--- /dev/null
+++ b/iros/TrajectoryPlayback.py
@@ -0,0 +1,62 @@
+import rospy
+import numpy as np
+import brett2.trajectories as tj
+from brett2.PR2 import PR2
+import trajectory_msgs.msg as tm
+
+if __name__ == "__main__":
+ rospy.init_node("trajectory_playback")
+ p = PR2 ()
+
+ traj_file_path = '/home/mallory/fuerte_workspace/sandbox/suturing/pr2_suturing/joint_states_listener/trajectories'
+ traj_name = []
+ ## Trajectory Options
+ #traj_name.append('/full_running_suture/pt1') #pierce both flaps and pull through
+ #traj_name.append('/full_running_suture/pt2') #tie right 2 loop knot
+ #traj_name.append('/full_running_suture/pt3') #tie left 1 loop knot
+ #traj_name.append('/full_running_suture/pt4') #tie right 1 loop knot
+ #traj_name.append('/full_running_suture/pt5') #tie left 1 loop knot
+ #traj_name.append('/full_running_suture/pt6') #first stitch of running suture
+ #traj_name.append('/full_running_suture/pt7') #second stitch of running suture
+ #traj_name.append('/full_running_suture/pt8') #third stitch of running suture
+ #traj_name.append('/full_running_suture/pt9') #final stitch of running suture
+ #traj_name.append('/full_running_suture/pt10') #final knot pt1
+ #traj_name.append('/full_running_suture/pt11') #final knot pt2 --- thread was approx 11.5 feet long
+
+ traj_name.append('/full_interrupted_suture/pt1') #stitch 1, pierce both flaps and pull through
+ traj_name.append('/full_interrupted_suture/pt2') #tie right 2 loop knot
+ traj_name.append('/full_interrupted_suture/pt3') #tie left 1 loop knot
+ traj_name.append('/full_interrupted_suture/pt4') #tie right 1 loop knot
+ traj_name.append('/full_interrupted_suture/pt5') #tie left 1 loop knot
+ traj_name.append('/full_interrupted_suture/pt6') #cut tails off
+ traj_name.append('/full_interrupted_suture/pt7') #stitch 2, pierce both flaps and pull through
+ traj_name.append('/full_interrupted_suture/pt8') #tie right 2 loop knot
+ traj_name.append('/full_interrupted_suture/pt9') #tie left 1 loop knot --- weird smoothing at the end
+ traj_name.append('/full_interrupted_suture/pt10') #tie right 1 loop knot
+ traj_name.append('/full_interrupted_suture/pt11') #tie left 1 loop knot
+ traj_name.append('/full_interrupted_suture/pt12') #cut tails off
+
+ ## Load Trajectory
+ for i in range(1):
+ i=11
+ larm_traj = np.load(traj_file_path + traj_name[i] + '_tj_larm.npy')
+ rarm_traj = np.load(traj_file_path + traj_name[i] + '_tj_rarm.npy')
+ lgrip_traj = np.load(traj_file_path + traj_name[i] + '_tj_lgrip.npy')
+ rgrip_traj = np.load(traj_file_path + traj_name[i] + '_tj_rgrip.npy')
+
+ print 'loaded', traj_name[i], 'trajectory'
+
+ length = len(larm_traj)
+
+ full_traj = np.zeros(length,
+ dtype=[('r_arm', '7float64'), ('l_arm', '7float64'), ('r_gripper', 'float64'), ('l_gripper', 'float64')])
+
+ full_traj['r_arm'] = rarm_traj
+ full_traj['l_arm'] = larm_traj
+ full_traj['r_gripper'] = rgrip_traj
+ full_traj['l_gripper'] = lgrip_traj
+
+ raw_input("Confirm trajectory type, press ENTER to start playback ...")
+
+ tj.follow_body_traj(p, full_traj, r_arm = True, l_arm = True, r_gripper = True, l_gripper = True)
+
diff --git a/iros/__init__.py b/iros/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/iros/dev/circle_finder.py b/iros/dev/circle_finder.py
new file mode 100644
index 0000000..a830420
--- /dev/null
+++ b/iros/dev/circle_finder.py
@@ -0,0 +1,200 @@
+#import cv2
+#import numpy as np
+#import rospy
+#import geometry_msgs.msg as gm
+#import sensor_msgs.msg as sm
+#import sys
+
+
+#if args.mode == "test":
+ #from jds_image_proc.pcd_io import load_xyzrgb
+ #import openravepy
+
+ #env = openravepy.Environment()
+ #env.Load("robots/pr2-beta-static.zae")
+ #robot = env.GetRobots()[0]
+ #robot.SetDOFValues([1.5], [14])
+ #hmat = robot.GetLink("narrow_stereo_gazebo_r_stereo_camera_optical_frame").GetTransform()
+
+ #for i in range(5):
+
+ #xyz_temp, rgb_temp = load_xyzrgb("pr2_suture_scene%s.pcd"%i)
+ #xyz.append(xyz_temp)
+ #rgb.append(rgb_temp)
+
+ #xyz1 = np.c_[xyz_temp.reshape(-1,3), np.ones((xyz_temp.size/3,1))]
+ #xyz1_tf = np.dot(xyz1, hmat.T)
+ #xyz_tf_temp = xyz1_tf[:,:3].reshape(xyz_temp.shape)
+ #xyz_tf_temp[np.isnan(xyz_tf_temp)] = -2
+
+ #xyz_tf.append(xyz_tf_temp)
+
+ #del xyz_temp
+ #del rgb_temp
+
+ #print 'xyz[0]', xyz[0]
+ #print 'xyz_tf[0]', xyz_tf[0]
+
+#elif args.mode == "live":
+ #import brett2.ros_utils as ru
+
+ #listener = ru.get_tf_listener()
+
+ #for i in range(5):
+ #print "waiting for messages on cloud topic %s"%args.cloud_topic
+ #msg = rospy.wait_for_message(args.cloud_topic, sm.PointCloud2)
+ #print "got msg %s!"%i
+
+ #xyz_temp, rgb_temp = ru.pc2xyzrgb(msg)
+ #if (xyz_temp.shape[0] == 1 or xyz_temp.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+
+ #xyz.append(xyz_temp)
+ #rgb.append(rgb_temp)
+
+ #xyz_tf_temp = ru.transform_points(xyz_temp, listener, "base_footprint", "/camera_rgb_optical_frame")
+ #xyz_tf_temp[np.isnan(xyz_tf_temp)] = -2
+ #xyz_tf.append(xyz_tf_temp)
+ ##print 'xyz_temp', xyz_temp
+
+ #del xyz_temp
+ #del rgb_temp
+
+ #rospy.sleep(1.0)
+
+ ##print 'xyz_tf[0]', xyz_tf[0]
+
+#raw_input("Press enter to continue...")
+
+import cv
+import cv2
+import math
+import numpy as np
+from jds_image_proc.pcd_io import load_xyzrgb
+from jds_utils.colorize import colorize
+
+class GetClick:
+ xy = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.xy = (x,y)
+ self.done = True
+
+#xyz, rgb = load_xyzrgb("suture_scene(holes).pcd")
+#xyz, rgb = load_xyzrgb("suture_scene(findholescut)0.pcd")
+xyz, rgb = load_xyzrgb("suture_scene(findneedle)0.pcd")
+img = rgb.copy()
+img2 = rgb.copy()
+
+d_red = cv2.cv.RGB(250, 55, 65)
+l_red = cv2.cv.RGB(250, 200, 200)
+d_blue = cv2.cv.RGB(40, 55, 200)
+
+cv2.namedWindow("rgb")
+cv2.imshow("rgb", img)
+cv2.waitKey(10)
+
+SIZEMIN = 20
+SIZEMAX = 33
+
+COLMIN = 200
+COLMAX = 500
+
+gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+
+#########################################
+### MSER Detector Implementation
+#########################################
+
+detector = cv2.FeatureDetector_create('MSER')
+fs = detector.detect(gray_img)
+fs.sort(key = lambda x: -x.size)
+
+def supress(x):
+ for f in fs:
+ distx = f.pt[0] - x.pt[0]
+ disty = f.pt[1] - x.pt[1]
+ dist = math.sqrt(distx*distx + disty*disty)
+ if (f.size > x.size) and (dist < f.size/2):
+ return True
+
+def fil(f):
+ if (f.size > SIZEMIN) and (f.size < SIZEMAX) and (f.pt[0] > COLMIN) and (f.pt[0] < COLMAX):
+ return True
+
+sfs = [x for x in fs if not supress(x)]
+circles = [c for c in sfs if fil(c)]
+circles.sort(key = lambda c: c.pt[1], reverse=True)
+
+horiz_pairs = []
+diag_pairs = []
+for i in range(len(circles)/2):
+ horiz_pairs.append([])
+ horiz_pairs[i].append(circles[2*i])
+ horiz_pairs[i].append(circles[(2*i)+1])
+
+for i in range(len(horiz_pairs)):
+ horiz_pairs[i].sort(key = lambda c: c.pt[0])
+
+for i in range(len(horiz_pairs)-1):
+ diag_pairs.append([])
+ diag_pairs[i].append(horiz_pairs[i][1])
+ diag_pairs[i].append(horiz_pairs[i+1][0])
+
+#print 'num circles', len(circles)
+#print 'num horiz_pairs', len(horiz_pairs)
+#print 'num diag_pairs', len(diag_pairs)
+
+cv2.namedWindow("rgb with MSER circles")
+cv2.imshow("rgb with MSER circles", img)
+cv2.waitKey(100)
+
+n = 0
+for p in range(len(horiz_pairs)):
+ for c in horiz_pairs[p]:
+ n += 1
+ cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_red, 2, cv2.CV_AA)
+ cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_blue, 1, cv2.CV_AA)
+ cv2.imshow("rgb with MSER circles", img)
+ cv2.waitKey(10)
+ #raw_input("Circle %i. Press enter to continue..."%n)
+
+#d = 0
+#for p in range(len(diag_pairs)):
+ #for c in diag_pairs[p]:
+ #d += 1
+ #cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_red, 2, cv2.CV_AA)
+ #cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_blue, 1, cv2.CV_AA)
+ #cv2.imshow("rgb with circles", img)
+ #cv2.waitKey(10)
+ #raw_input("Circle %i. Press enter to continue..."%d)
+
+
+#########################################
+### Hough Transform Implementation
+#########################################
+#cv2.HoughCircles(image, method, dp, minDist[, circles[, param1[, param2[, minRadius[, maxRadius]]]]])
+#circles = cv2.HoughCircles(img,cv2.cv.CV_HOUGH_GRADIENT,1,10,param1=100,param2=30,minRadius=5,maxRadius=20)
+#for i in circles[0,:]:
+ #cv2.circle(cimg,(i[0],i[1]),i[2],(0,255,0),1)
+
+circles = cv2.HoughCircles(gray_img, cv.CV_HOUGH_GRADIENT, 1, 50, param1=240, param2=20, minRadius=10, maxRadius=30)
+circles = np.uint16(np.around(circles))
+
+for c in circles[0,:]:
+ cv2.circle(img2, (c[0], c[1]), c[2], d_red, 2, cv2.CV_AA)
+ cv2.circle(img2, (c[0], c[1]), c[2], d_blue, 1, cv2.CV_AA)
+
+cv2.namedWindow("rgb with hough circles")
+cv2.imshow("rgb with hough circles", img2)
+cv2.waitKey(100)
+
+
+cv2.imshow("rgb with hough circles", img2)
+cv2.waitKey(100)
+raw_input("Press enter to finish...")
+cv2.destroyAllWindows()
+
+
diff --git a/iros/dev/image_gui.py b/iros/dev/image_gui.py
new file mode 100644
index 0000000..a08bfd7
--- /dev/null
+++ b/iros/dev/image_gui.py
@@ -0,0 +1,106 @@
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument("--mode", choices=["live","test"], default="live")
+parser.add_argument("--cloud_topic", default="/drop/points")
+parser.add_argument("--plot3d", action="store_true")
+args = parser.parse_args()
+
+
+# how to get a pcd file
+import cloudprocpy # my homemade pcl python bindings. built with trajopt if BUILD_CLOUDPROC enabled
+grabber=cloudprocpy.CloudGrabber()
+xyzrgb = grabber.getXYZRGB()
+xyzrgb.save("suture_scene1.pcd")
+
+raw_input("got pcd. Press enter to continue...")
+
+
+
+import cv2
+import numpy as np
+
+
+if args.mode == "test":
+ from jds_image_proc.pcd_io import load_xyzrgb
+ import openravepy
+
+ xyz, rgb = load_xyzrgb("pr2_suture_scene.pcd")
+ env = openravepy.Environment()
+ env.Load("robots/pr2-beta-static.zae")
+ robot = env.GetRobots()[0]
+ robot.SetDOFValues([1.5], [14])
+ hmat = robot.GetLink("narrow_stereo_gazebo_r_stereo_camera_optical_frame").GetTransform()
+ xyz1 = np.c_[xyz.reshape(-1,3), np.ones((xyz.size/3,1))]
+ xyz1_tf = np.dot(xyz1, hmat.T)
+ xyz_tf = xyz1_tf[:,:3].reshape(xyz.shape)
+
+elif args.mode == "live":
+ import brett2.ros_utils as ru
+ import rospy
+ import sensor_msgs.msg as sm
+
+ rospy.init_node("image_gui", disable_signals = True)
+ listener = ru.get_tf_listener()
+ print "waiting for message on cloud topic %s"%args.cloud_topic
+ msg = rospy.wait_for_message(args.cloud_topic, sm.PointCloud2)
+ print "got it!"
+ xyz, rgb = ru.pc2xyzrgb(msg)
+ if (xyz.shape[0] == 1 or xyz.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+ xyz_tf = ru.transform_points(xyz, listener, "base_footprint", "/camera_rgb_optical_frame")
+
+xyz_tf[np.isnan(xyz_tf)] = -2
+rgb_plot = rgb.copy()
+depth_plot = xyz_tf[:,:,2].astype('float')
+depth_plot[np.isnan(depth_plot)] = 0
+depth_plot -= depth_plot.min()
+depth_plot = (depth_plot * (255 / depth_plot.max())).astype('uint8')
+
+class GetClick:
+ xy = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.xy = (x,y)
+ self.done = True
+
+cv2.namedWindow("rgb")
+rect_corners = []
+for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback("rgb", gc.callback)
+ while not gc.done:
+ cv2.imshow("rgb", rgb_plot)
+ cv2.waitKey(10)
+ rect_corners.append(gc.xy)
+
+xy_tl = np.array(rect_corners).min(axis=0)
+xy_br = np.array(rect_corners).max(axis=0)
+cv2.rectangle(rgb_plot, tuple(xy_tl), tuple(xy_br), (0, 255, 0)) ## this doesn't seem to be working...
+
+# extract depths from drawn rectangle ##ADD STUFF HERE to make this work better
+colmin, rowmin = xy_tl
+colmax, rowmax = xy_br
+
+z_rectangle = xyz_tf[rowmin:rowmax, colmin:colmax, 2] # z values in extracted rectangle
+
+row_needle, col_needle = np.unravel_index(z_rectangle.argmax(), z_rectangle.shape)
+col_needle += colmin # since these indices are from small rectangle
+row_needle += rowmin
+cv2.circle(rgb_plot, (col_needle, row_needle), 3, (255, 0, 0), 2) ## this doesn't seem to be working...
+
+# plot the point cloud with a circle around "highest" point
+if args.plot3d:
+ from mayavi import mlab
+ x,y,z = xyz_tf[~np.isnan(xyz_tf[:,:,0])].T
+ mlab.points3d(x,y,z,color=(1,0,0), mode="2dvertex")
+ xneedle,yneedle,zneedle = xyz_tf[row_needle, col_needle]
+ print "xyz needle point", xneedle, yneedle, zneedle
+ mlab.points3d([xneedle], [yneedle], [zneedle], color=(0,1,0), mode="sphere", scale_factor=.04, opacity=.2)
+ mlab.show()
+else:
+ while True:
+ cv2.imshow("rgb",rgb_plot)
+ #cv2.imshow("depth", depth_plot)
+ cv2.waitKey(10)
diff --git a/iros/dev/image_gui2.py b/iros/dev/image_gui2.py
new file mode 100755
index 0000000..5d0b2e1
--- /dev/null
+++ b/iros/dev/image_gui2.py
@@ -0,0 +1,187 @@
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument("--mode", choices=["live","test"], default="live")
+parser.add_argument("--cloud_topic", default="/drop/points")
+parser.add_argument("--plot3d", action="store_true")
+args = parser.parse_args()
+
+"""
+# how to get a pcd file
+# plug asus into local computer
+import cloudprocpy # my homemade pcl python bindings. built with trajopt if BUILD_CLOUDPROC enabled
+grabber=cloudprocpy.CloudGrabber()
+xyzrgb = grabber.getXYZRGB()
+xyzrgb.save("pr2_suture_scene.pcd")
+"""
+
+import cv2
+import numpy as np
+import rospy
+import geometry_msgs.msg as gm
+import sensor_msgs.msg as sm
+import sys
+
+#rospy.init_node("image_gui", disable_signals = True)
+rospy.init_node("image_gui")
+
+xyz = []
+xyz_tf = []
+rgb = []
+
+
+if args.mode == "test":
+ from jds_image_proc.pcd_io import load_xyzrgb
+ import openravepy
+
+ env = openravepy.Environment()
+ env.Load("robots/pr2-beta-static.zae")
+ robot = env.GetRobots()[0]
+ robot.SetDOFValues([1.5], [14])
+ hmat = robot.GetLink("narrow_stereo_gazebo_r_stereo_camera_optical_frame").GetTransform()
+
+ for i in range(5):
+
+ xyz_temp, rgb_temp = load_xyzrgb("pr2_suture_scene%s.pcd"%i)
+ xyz.append(xyz_temp)
+ rgb.append(rgb_temp)
+
+ xyz1 = np.c_[xyz_temp.reshape(-1,3), np.ones((xyz_temp.size/3,1))]
+ xyz1_tf = np.dot(xyz1, hmat.T)
+ xyz_tf_temp = xyz1_tf[:,:3].reshape(xyz_temp.shape)
+ xyz_tf_temp[np.isnan(xyz_tf_temp)] = -2
+
+ xyz_tf.append(xyz_tf_temp)
+
+ del xyz_temp
+ del rgb_temp
+
+ #print 'xyz[0]', xyz[0]
+ #print 'xyz_tf[0]', xyz_tf[0]
+
+elif args.mode == "live":
+ import brett2.ros_utils as ru
+
+ listener = ru.get_tf_listener()
+
+ for i in range(5):
+ print "waiting for messages on cloud topic %s"%args.cloud_topic
+ msg = rospy.wait_for_message(args.cloud_topic, sm.PointCloud2)
+ print "got msg %s!"%i
+
+ xyz_temp, rgb_temp = ru.pc2xyzrgb(msg)
+ if (xyz_temp.shape[0] == 1 or xyz_temp.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+
+ xyz.append(xyz_temp)
+ rgb.append(rgb_temp)
+
+ xyz_tf_temp = ru.transform_points(xyz_temp, listener, "base_footprint", "/camera_rgb_optical_frame")
+ xyz_tf_temp[np.isnan(xyz_tf_temp)] = -2
+ xyz_tf.append(xyz_tf_temp)
+ #print 'xyz_temp', xyz_temp
+
+ del xyz_temp
+ del rgb_temp
+
+ rospy.sleep(1.0)
+
+ #print 'xyz_tf[0]', xyz_tf[0]
+
+#raw_input("Press enter to continue...")
+
+rgb_plot = rgb[0].copy()
+
+#depth_plot = xyz_tf[:,:,2].astype('float')
+#depth_plot[np.isnan(depth_plot)] = 0
+#depth_plot -= depth_plot.min()
+#depth_plot = (depth_plot * (255 / depth_plot.max())).astype('uint8')
+
+class GetClick:
+ xy = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.xy = (x,y)
+ self.done = True
+
+cv2.namedWindow("rgb")
+rect_corners = []
+for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback("rgb", gc.callback)
+ while not gc.done:
+ cv2.imshow("rgb", rgb_plot)
+ cv2.waitKey(10)
+ rect_corners.append(gc.xy)
+
+xy_tl = np.array(rect_corners).min(axis=0)
+xy_br = np.array(rect_corners).max(axis=0)
+cv2.rectangle(rgb_plot, tuple(xy_tl), tuple(xy_br), (0, 255, 0)) ## this doesn't seem to be working...
+
+colmin, rowmin = xy_tl
+colmax, rowmax = xy_br
+
+row_needle = []
+col_needle = []
+
+# extract depths from drawn rectangle
+for i in range(5):
+ z_rectangle = xyz_tf[i][rowmin:rowmax, colmin:colmax, 2] # z values in extracted rectangle
+ row_needle_temp, col_needle_temp = np.unravel_index(z_rectangle.argmax(), z_rectangle.shape)
+
+ col_needle_temp += colmin # since these indices are from small rectangle
+ row_needle_temp += rowmin
+
+ row_needle.append(row_needle_temp)
+ col_needle.append(col_needle_temp)
+
+ del z_rectangle
+ del col_needle_temp
+ del row_needle_temp
+
+
+xneedle = np.zeros((5))
+yneedle = np.zeros((5))
+zneedle = np.zeros((5))
+
+for i in range(5):
+ xneedle[i], yneedle[i], zneedle[i] = xyz_tf[i][row_needle[i], col_needle[i]]
+
+print "xyz needle point %s"%i, xneedle, yneedle, zneedle
+
+ind = np.argmax(zneedle)
+
+max_needle = []
+max_needle.append(xneedle[ind])
+max_needle.append(yneedle[ind])
+max_needle.append(zneedle[ind])
+
+#rgb_plot_best = rgb[ind].copy()
+#cv2.namedWindow("best_rgb")
+#cv2.imshow("best_rgb", rgb_plot_best)
+
+cv2.circle(rgb_plot, (col_needle[ind], row_needle[ind]), 3, (255, 0, 0), 2) ## this doesn't seem to be working...
+
+#save needle location to np file
+np.save('/home/mallory/mallory_workspace/suturing/python/iros/needleLoc.npy', np.array(max_needle))
+print 'needle location saved!'
+
+
+# plot the point cloud with a circle around "highest" point
+if args.plot3d:
+ from mayavi import mlab
+ x,y,z = xyz_tf[ind][~np.isnan(xyz_tf[ind][:,:,0])].T
+ mlab.points3d(x,y,z,color=(1,0,0), mode="2dvertex")
+ mlab.points3d([max_needle[0]], [max_needle[1]], [max_needle[2]], color=(0,1,0), mode="sphere", scale_factor=.04, opacity=.2)
+ mlab.show()
+#else:
+ #rospy.sleep(1)
+ #sys.exit()
+ #while True:
+ # cv2.imshow("rgb",rgb_plot)
+ # cv2.imshow("depth", depth_plot)
+ # cv2.waitKey(10)
+
+raw_input("press enter when done")
+#rospy.spin()
diff --git a/iros/dev/line_finder.py b/iros/dev/line_finder.py
new file mode 100644
index 0000000..f52207a
--- /dev/null
+++ b/iros/dev/line_finder.py
@@ -0,0 +1,53 @@
+import cv
+import cv2
+import math
+import numpy as np
+from jds_image_proc.pcd_io import load_xyzrgb
+from jds_utils.colorize import colorize
+
+#xyz, rgb = load_xyzrgb("suture_scene(holes).pcd")
+#xyz, rgb = load_xyzrgb("suture_scene(findholescut)0.pcd")
+xyz, rgb = load_xyzrgb("suture_scene(findneedle)0.pcd")
+img = rgb.copy()
+img2 = rgb.copy()
+gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+
+d_red = cv2.cv.RGB(150, 55, 65)
+l_red = cv2.cv.RGB(250, 200, 200)
+d_blue = cv2.cv.RGB(40, 55, 200)
+clr = cv2.cv.RGB(0, 0, 0)
+
+cv2.namedWindow("rgb")
+cv2.imshow("rgb", img)
+cv2.waitKey(10)
+
+
+#cv2.HoughLinesP(image, rho, theta, threshold[, lines[, minLineLength[, maxLineGap]]]) lines
+#cv2.HoughLinesP( img, lines, 1, CV_PI/180, 80, 30, 10 )
+
+edges = cv2.Canny(gray_img, 80, 240)
+cv2.imshow("edges", edges)
+cv2.waitKey(100)
+
+lines = cv2.HoughLinesP(edges, 1, cv.CV_PI/360, 1, minLineLength = 50, maxLineGap = 5)
+
+vert_lines = [l for l in lines[0] if (math.atan2(abs(l[0] - l[2]) , abs(l[1] - l[3])) == 0.0)]
+
+for l in vert_lines:
+ cv2.line(img, (l[0], l[1]), (l[2], l[3]), clr, 2)
+
+#cv2.line(img, (vert_lines[0][0], vert_lines[0][1]), (vert_lines[0][2], vert_lines[0][3]), clr, 2)
+
+cv2.imshow("rgb with vertical lines", img)
+cv2.waitKey(100)
+
+for l in lines[0]:
+ cv2.line(img2, (l[0], l[1]), (l[2], l[3]), clr, 2)
+
+cv2.imshow("rgb with all lines", img2)
+cv2.waitKey(100)
+
+raw_input("Press enter to finish...")
+cv2.destroyAllWindows()
+
+
diff --git a/iros/dev/simple_clicker_dev.py b/iros/dev/simple_clicker_dev.py
new file mode 100644
index 0000000..9997cb7
--- /dev/null
+++ b/iros/dev/simple_clicker_dev.py
@@ -0,0 +1,293 @@
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument("mode", choices=["live","test"], default="live")
+parser.add_argument("object", choices=["holes_cut", "needle"])
+parser.add_argument("cloud_topic", default="/drop/points")
+parser.add_argument("plot3d", action="store_true")
+args = parser.parse_args()
+
+
+# how to get a pcd file
+#import cloudprocpy
+#grabber=cloudprocpy.CloudGrabber()
+#xyzrgb = grabber.getXYZRGB()
+#xyzrgb.save("pr2_suture_scene(needle_finding).pcd")
+
+#raw_input("got point cloud. press enter to continue")
+
+import cv2
+import numpy as np
+import rospy
+import openravepy
+import sys
+
+import geometry_msgs.msg as gm
+import sensor_msgs.msg as sm
+from jds_image_proc.pcd_io import load_xyzrgb
+from jds_utils.colorize import colorize
+import brett2.ros_utils as ru
+
+rospy.init_node("image_gui")
+
+#########################################
+### find holes and cut
+#########################################
+def find_holes_cut(plot):
+
+ hole1_center = []
+ hole2_center = []
+ cut_endpoints = []
+
+ print colorize("click on the centers of the two relevant holes", 'red', bold=True)
+
+ for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback("rgb", gc.callback)
+ while not gc.done:
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(10)
+ if i == 1:
+ hole1_center.append(gc.x)
+ hole1_center.append(gc.y)
+ else:
+ hole2_center.append(gc.x)
+ hole2_center.append(gc.y)
+
+ cv2.circle(plot, (hole1_center[0], hole1_center[1]), 10, (255, 0, 0), 2)
+ cv2.circle(plot, (hole2_center[0], hole2_center[1]), 10, (255, 0, 0), 2)
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(100)
+
+ x_hole1, y_hole1, z_hole1 = xyz_tf[hole1_center[0], hole1_center[1]]
+ x_hole2, y_hole2, z_hole2 = xyz_tf[hole2_center[0], hole2_center[1]]
+
+ print 'hole1 3d location', x_hole1, y_hole1, z_hole1
+ print 'hole2 3d location', x_hole2, y_hole2, z_hole2
+
+ raw_input("press enter to continue")
+
+ ### find cut
+
+ print colorize("click at the top and bottom of the cut line", 'red', bold=True)
+
+ for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback("rgb", gc.callback)
+ while not gc.done:
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(10)
+ cut_endpoints.append(gc.xy)
+
+ xy_t = np.array(cut_endpoints).min(axis=0)
+ xy_b = np.array(cut_endpoints).max(axis=0)
+
+ cv2.line(plot, tuple(xy_t), tuple(xy_b), (0, 255, 0), 2)
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(100)
+
+ x1_cut, y1_cut, z1_cut = xyz_tf[tuple(xy_t)]
+ x2_cut, y2_cut, z2_cut = xyz_tf[tuple(xy_b)]
+
+
+ #midpoint of line
+ mid_y = (xy_t[1] - xy_b[1])/2
+ mid_plot_x = xy_b[0]
+ mid_plot_y = xy_t[1] - mid_y
+
+ cv2.circle(plot, (mid_plot_x, mid_plot_y), 5, (0, 0, 255), -1)
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(100)
+
+ mid_ptx, mid_pty, mid_ptz = xyz_tf[mid_plot_x, mid_plot_y]
+
+ print 'top of cut 3d', x1_cut, y1_cut, z1_cut
+ print 'bottom of cut 3d', x2_cut, y2_cut, z2_cut
+ print 'middle of cut 3d', mid_ptx, mid_pty, mid_ptz
+
+ raw_input("press enter to continue")
+
+
+#########################################
+### find needle
+#########################################
+def find_needle(plot):
+
+ needle_rect_corners = []
+
+ print colorize("click at the corners of a rectangle which encompasses the needle tip", 'red', bold=True)
+
+ for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback("rgb", gc.callback)
+ while not gc.done:
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(10)
+ needle_rect_corners.append(gc.xy)
+
+ xy_tl = np.array(needle_rect_corners).min(axis=0)
+ xy_br = np.array(needle_rect_corners).max(axis=0)
+
+ cv2.rectangle(plot, tuple(xy_tl), tuple(xy_br), (0, 255, 0))
+ cv2.imshow("rgb", plot)
+ cv2.waitKey(100)
+
+ colmin, rowmin = xy_tl
+ colmax, rowmax = xy_br
+
+ row_needle = []
+ col_needle = []
+
+ # extract depths from drawn rectangle
+ for i in range(3):
+ z_rectangle = xyz_tf[i][rowmin:rowmax, colmin:colmax, 2] # z values in extracted rectangle
+ row_needle_temp, col_needle_temp = np.unravel_index(z_rectangle.argmax(), z_rectangle.shape)
+
+ col_needle_temp += colmin # since these indices are from small rectangle
+ row_needle_temp += rowmin
+
+ row_needle.append(row_needle_temp)
+ col_needle.append(col_needle_temp)
+
+ del z_rectangle
+ del col_needle_temp
+ del row_needle_temp
+
+
+ xneedle = np.zeros((3))
+ yneedle = np.zeros((3))
+ zneedle = np.zeros((3))
+
+ for i in range(3):
+ xneedle[i], yneedle[i], zneedle[i] = xyz_tf[i][row_needle[i], col_needle[i]]
+
+ print "xyz needle point %s"%i, xneedle, yneedle, zneedle
+
+ ind = np.argmax(zneedle)
+
+ max_needle = []
+ max_needle.append(xneedle[ind])
+ max_needle.append(yneedle[ind])
+ max_needle.append(zneedle[ind])
+
+ cv2.circle(rgb_plot, (col_needle[ind], row_needle[ind]), 3, (255, 0, 0), 2)
+ cv2.imshow("rgb", rgb_plot)
+ cv2.waitKey(100)
+
+ # plot the point cloud with a circle around "highest" point
+ from mayavi import mlab
+ x,y,z = xyz_tf[ind][~np.isnan(xyz_tf[ind][:,:,0])].T
+ mlab.points3d(x,y,z,color=(1,0,0), mode="2dvertex")
+ mlab.points3d([max_needle[0]], [max_needle[1]], [max_needle[2]], color=(0,1,0), mode="sphere", scale_factor=.04, opacity=.2)
+ mlab.show()
+
+ raw_input("press enter when done")
+
+#########################################
+### clicking set-up
+#########################################
+
+class GetClick:
+ x = None
+ y = None
+ xy = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.x = x
+ self.y = y
+ self.xy = (x,y)
+ self.done = True
+
+cv2.namedWindow("rgb")
+
+xyz = []
+xyz_tf = []
+rgb = []
+
+if args.object == "needle":
+ if args.mode == "test":
+ for i in range(3):
+ xyz_temp, rgb_temp = load_xyzrgb("pr2_suture_scene(needle_finding)%s.pcd"%i)
+ xyz.append(xyz_temp)
+ rgb.append(rgb_temp)
+
+ env = openravepy.Environment()
+ env.Load("robots/pr2-beta-static.zae")
+ robot = env.GetRobots()[0]
+ robot.SetDOFValues([1.5], [14])
+ hmat = robot.GetLink("narrow_stereo_gazebo_r_stereo_camera_optical_frame").GetTransform()
+
+ xyz1 = np.c_[xyz_temp.reshape(-1,3), np.ones((xyz_temp.size/3,1))]
+ xyz1_tf = np.dot(xyz1, hmat.T)
+ xyz_tf_temp = xyz1_tf[:,:3].reshape(xyz_temp.shape)
+ xyz_tf_temp[np.isnan(xyz_tf_temp)] = -2
+
+ xyz_tf.append(xyz_tf_temp)
+
+ del xyz_temp
+ del rgb_temp
+
+
+ elif args.mode == "live":
+ listener = ru.get_tf_listener()
+ for i in range(5):
+ print "waiting for messages on cloud topic %s"%args.cloud_topic
+ msg = rospy.wait_for_message(args.cloud_topic, sm.PointCloud2)
+ print "got msg %s!"%i
+
+ xyz_temp, rgb_temp = ru.pc2xyzrgb(msg)
+ if (xyz_temp.shape[0] == 1 or xyz_temp.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+
+ xyz.append(xyz_temp)
+ rgb.append(rgb_temp)
+
+ xyz_tf_temp = ru.transform_points(xyz_temp, listener, "base_footprint", "/camera_rgb_optical_frame")
+ xyz_tf_temp[np.isnan(xyz_tf_temp)] = -2
+ xyz_tf.append(xyz_tf_temp)
+
+ del xyz_temp
+ del rgb_temp
+
+ rospy.sleep(1.0)
+
+ rgb_plot = rgb[0].copy()
+ find_needle(rgb_plot)
+
+elif args.object == "holes_cut":
+ if args.mode == "test":
+
+ env = openravepy.Environment()
+ env.Load("robots/pr2-beta-static.zae")
+ robot = env.GetRobots()[0]
+ robot.SetDOFValues([1.5], [14])
+ hmat = robot.GetLink("narrow_stereo_gazebo_r_stereo_camera_optical_frame").GetTransform()
+
+ xyz, rgb = load_xyzrgb("suture_scene(findholescut).pcd")
+
+ xyz1 = np.c_[xyz.reshape(-1,3), np.ones((xyz.size/3,1))]
+ xyz1_tf = np.dot(xyz1, hmat.T)
+ xyz_tf = xyz1_tf[:,:3].reshape(xyz.shape)
+ xyz_tf[np.isnan(xyz_tf)] = -2
+
+ elif args.mode == "live":
+ listener = ru.get_tf_listener()
+ print "waiting for messages on cloud topic %s"%args.cloud_topic
+ msg = rospy.wait_for_message(args.cloud_topic, sm.PointCloud2)
+
+ xyz, rgb = ru.pc2xyzrgb(msg)
+ if (xyz.shape[0] == 1 or xyz.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+
+ xyz.append(xyz)
+ rgb.append(rgb)
+
+ xyz_tf = ru.transform_points(xyz, listener, "base_footprint", "/camera_rgb_optical_frame")
+ xyz_tf[np.isnan(xyz_tf)] = -2
+
+ rgb_plot = rgb.copy()
+
+ find_holes_cut(rgb_plot)
+
+
+
diff --git a/iros/dev/suture_scene(holes).pcd b/iros/dev/suture_scene(holes).pcd
new file mode 100644
index 0000000..b466c51
Binary files /dev/null and b/iros/dev/suture_scene(holes).pcd differ
diff --git a/iros/execute_suture_traj.py b/iros/execute_suture_traj.py
new file mode 100644
index 0000000..19b59aa
--- /dev/null
+++ b/iros/execute_suture_traj.py
@@ -0,0 +1,601 @@
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument("mode", choices=["openrave", "gazebo", "reality"])
+parser.add_argument("--cloud_topic", default="/camera/depth_registered/points")
+parser.add_argument("task")
+parser.add_argument("part_index",type=int)
+parser.add_argument("--segment_index", type=int, default=0)
+parser.add_argument("--interactive",action="store_true")
+parser.add_argument("--no_movement",action="store_true")
+args = parser.parse_args()
+
+import numpy as np
+import trajoptpy
+import lfd
+import openravepy
+import json
+import trajoptpy.math_utils as mu
+import trajoptpy.kin_utils as ku
+import trajoptpy.make_kinbodies as mk
+import brett2.ros_utils as ru
+from jds_utils.colorize import colorize
+from jds_utils import conversions
+from jds_utils.yes_or_no import yes_or_no
+from brett2 import mytf
+import yaml
+import cv2
+
+import functools as ft
+import simple_clicker as sc
+import os
+import os.path as osp
+from glob import glob
+import subprocess, sys, time
+import sensor_msgs.msg as sm
+
+from time import sleep
+
+window_name = "Find Keypoints"
+cv2.namedWindow(window_name)
+
+#########################
+###### Set up
+#########################
+
+IROS_DATA_DIR = os.getenv("IROS_DATA_DIR")
+
+import iros
+IROS_DIR = osp.dirname(iros.__file__)
+task_file = osp.join(IROS_DIR, "suture_demos2.yaml")
+
+with open(osp.join(IROS_DATA_DIR,task_file),"r") as fh:
+ task_info = yaml.load(fh)
+
+jtf = osp.join(IROS_DATA_DIR, args.task, 'joint_trajectories', 'pt' + str(args.part_index))
+kpf = osp.join(IROS_DATA_DIR, args.task, 'keypoints', 'pt' + str(args.part_index))
+pcf = osp.join(IROS_DATA_DIR, args.task, 'point_clouds', 'pt' + str(args.part_index))
+
+if args.mode == "openrave":
+ env = openravepy.Environment()
+ env.StopSimulation()
+ env.Load("robots/pr2-beta-static.zae")
+ env.Load(osp.join(osp.dirname(lfd.__file__), "data/table2.xml"))
+ robot = env.GetRobots()[0]
+ torso_joint = robot.GetJoint("torso_lift_joint")
+ robot.SetDOFValues(torso_joint.GetLimits()[1], [torso_joint.GetDOFIndex()])
+
+else:
+ import rospy
+ from brett2.PR2 import PR2
+ if rospy.get_name() == "/unnamed": rospy.init_node("execute_suture", disable_signals=True)
+ rviz = ru.RvizWrapper()
+
+brett = PR2()
+env = brett.env
+robot = brett.robot
+if args.mode == "gazebo":
+ brett.torso.go_up()
+ rospy.sleep(1)
+brett.head.set_pan_tilt(0,1)
+brett.join_all()
+brett.update_rave()
+
+#needle_tip = mk.create_dummy_body(env, name="needle_tip")
+needle_tip = mk.create_spheres(env, [(0,0,0)], radii=.02, name = "needle_tip")
+
+demo_env=env.CloneSelf(1)
+demo_env.StopSimulation()
+demo_robot = demo_env.GetRobot("pr2")
+demo_needle_tip = demo_env.GetKinBody("needle_tip")
+
+trajoptpy.SetInteractive(args.interactive)
+
+if False:#args.mode == "reality":
+ table_bounds = map(float, rospy.get_param("table_bounds").split())
+ mk.create_box_from_bounds(env,table_bounds, name="table")
+else:
+ import lfd
+ env.Load(osp.join(osp.dirname(lfd.__file__), "data/table2.xml"))
+
+#######################
+
+from collections import namedtuple
+TrajSegment = namedtuple("TrajSegment", "larm_traj rarm_traj lgrip_angle rgrip_angle") # class to describe trajectory segments
+
+PARTNUM = args.part_index
+SEGNUM = len(task_info[args.task][args.part_index]["segments"])
+print 'number of segments', SEGNUM
+OPEN_ANGLE = .08
+CLOSED_ANGLE = 0
+
+
+def transform_hmats(f, hmats):
+ hmats = np.array(hmats)
+ oldpts_md = hmats[:,:3,3]
+ oldrots_mad = hmats[:,:3,:3]
+ newpts_mg, newrots_mgd = f.transform_frames(oldpts_md, oldrots_mad)
+ tf_hmats = hmats.copy()
+ tf_hmats[:,:3,:3] = newrots_mgd
+ tf_hmats[:,:3,3] = newpts_mg
+ return tf_hmats
+
+def translation_matrix(xyz):
+ out = np.eye(4)
+ out[:3,3] = xyz
+ return out
+
+def adaptive_resample(x, tol, max_change=None, min_steps=3):
+ """
+ resample original signal with a small number of waypoints so that the the sparsely sampled function,
+ when linearly interpolated, deviates from the original function by less than tol at every time
+
+ input:
+ x: 2D array in R^(t x k) where t is the number of timesteps
+ tol: tolerance. either a single scalar or a vector of length k
+ max_change: max change in the sparsely sampled signal at each timestep
+ min_steps: minimum number of timesteps in the new trajectory. (usually irrelevant)
+
+ output:
+ new_times, new_x
+
+ assuming that the old signal has times 0,1,2,...,len(x)-1
+ this gives the new times, and the new signal
+ """
+ x = np.asarray(x)
+ assert x.ndim == 2
+
+ if np.isscalar(tol):
+ tol = np.ones(x.shape[1])*tol
+ else:
+ tol = np.asarray(tol)
+ assert tol.ndim == 1 and tol.shape[0] == x.shape[1]
+
+ times = np.arange(x.shape[0])
+
+ if max_change is None:
+ max_change = np.ones(x.shape[1]) * np.inf
+ elif np.isscalar(max_change):
+ max_change = np.ones(x.shape[1]) * max_change
+ else:
+ max_change = np.asarray(max_change)
+ assert max_change.ndim == 1 and max_change.shape[0] == x.shape[1]
+
+ dl = mu.norms(x[1:] - x[:-1],1)
+ l = np.cumsum(np.r_[0,dl])
+
+ def bad_inds(x1, t1):
+ ibad = np.flatnonzero( (np.abs(mu.interp2d(l, l1, x1) - x) > tol).any(axis=1) )
+ jbad1 = np.flatnonzero((np.abs(x1[1:] - x1[:-1]) > max_change[None,:]).any(axis=1))
+ if len(ibad) == 0 and len(jbad1) == 0: return []
+ else:
+ lbad = l[ibad]
+ jbad = np.unique(np.searchsorted(l1, lbad)) - 1
+ jbad = np.union1d(jbad, jbad1)
+ return jbad
+
+ l1 = np.linspace(0,l[-1],min_steps)
+ for _ in xrange(20):
+ x1 = mu.interp2d(l1, l, x)
+ bi = bad_inds(x1, l1)
+ if len(bi) == 0:
+ return np.interp(l1, l, times), x1
+ else:
+ l1 = np.union1d(l1, (l1[bi] + l1[bi+1]) / 2 )
+
+
+ raise Exception("couldn't subdivide enough. something funny is going on. check your input data")
+
+
+def segment_trajectory(larm, rarm, lgrip, rgrip):
+
+ thresh = .04 # open/close threshold
+
+ n_steps = len(larm)
+ assert len(rarm)==n_steps
+ assert len(lgrip)==n_steps
+ assert len(rgrip)==n_steps
+
+
+ # indices BEFORE transition occurs
+ l_openings = np.flatnonzero((lgrip[1:] >= thresh) & (lgrip[:-1] < thresh))
+ r_openings = np.flatnonzero((rgrip[1:] >= thresh) & (rgrip[:-1] < thresh))
+ l_closings = np.flatnonzero((lgrip[1:] < thresh) & (lgrip[:-1] >= thresh))
+ r_closings = np.flatnonzero((rgrip[1:] < thresh) & (rgrip[:-1] >= thresh))
+
+ before_transitions = np.r_[l_openings, r_openings, l_closings, r_closings]
+ after_transitions = before_transitions+1
+ seg_starts = np.unique(np.r_[0, after_transitions])
+ seg_ends = np.unique(np.r_[before_transitions, n_steps])
+
+
+ def binarize_gripper(angle):
+ if angle > thresh: return OPEN_ANGLE
+ else: return CLOSED_ANGLE
+
+ traj_segments = []
+ for (i_start, i_end) in zip(seg_starts, seg_ends):
+ l_angle = binarize_gripper(lgrip[i_start])
+ r_angle = binarize_gripper(rgrip[i_start])
+ traj_segments.append(TrajSegment( larm[i_start:i_end+1], rarm[i_start:i_end+1], l_angle, r_angle ))
+
+ return traj_segments
+
+
+#def GetLinkMaybeAttached(robot,ee_link):
+ #link = robot.GetLink(ee_link)
+ #if link is not None: return link
+ #grabbed_bodies = robot.GetGrabbed()
+ #for grabbed_body in grabbed_bodies:
+ #link = grabbed_body.GetLink(ee_link)
+ #if link: return link
+ #return None
+
+def plan_follow_traj(robot, manip_name, ee_link, new_hmats, old_traj, other_manip_name = None, other_manip_traj = None):
+
+ n_steps = len(new_hmats)
+ assert old_traj.shape[0] == n_steps
+ assert old_traj.shape[1] == 7
+
+ arm_inds = robot.GetManipulator(manip_name).GetArmIndices()
+
+ ee_linkname = ee_link.GetName()
+
+ init_traj = old_traj.copy()
+ init_traj[0] = robot.GetDOFValues(arm_inds)
+
+ request = {
+ "basic_info" : {
+ "n_steps" : n_steps,
+ "manip" : manip_name,
+ "start_fixed" : True
+ },
+ "costs" : [
+ {
+ "type" : "joint_vel",
+ "params": {"coeffs" : [1]}
+ },
+ {
+ "type" : "collision",
+ "params" : {"coeffs" : [10],"dist_pen" : [0.005]}
+ }
+ ],
+ "constraints" : [
+ ],
+ "init_info" : {
+ "type":"given_traj",
+ "data":[x.tolist() for x in init_traj]
+ }
+ }
+ if other_manip_name is not None:
+ request["scene_states"] = []
+ other_dof_inds = robot.GetManipulator(other_manip_name).GetArmIndices()
+
+ poses = [openravepy.poseFromMatrix(hmat) for hmat in new_hmats]
+ for (i_step,pose) in enumerate(poses):
+ request["costs"].append(
+ {"type":"pose",
+ "params":{
+ "xyz":pose[4:7].tolist(),
+ "wxyz":pose[0:4].tolist(),
+ "link":ee_linkname,
+ "timestep":i_step,
+ "pos_coeffs":[15,15,15],
+ "rot_coeff":[0,0,0]
+ }
+ })
+ if other_manip_name is not None:
+ request["scene_states"].append(
+ {"timestep": i_step, "obj_states": [{"name": "pr2", "dof_vals":other_manip_traj[i_step].tolist(), "dof_inds":other_dof_inds.tolist()}] })
+
+
+ s = json.dumps(request)
+ prob = trajoptpy.ConstructProblem(s, env) # create object that stores optimization problem
+ result = trajoptpy.OptimizeProblem(prob) # do optimization
+ traj = result.GetTraj()
+
+ saver = openravepy.RobotStateSaver(robot)
+ pos_errs = []
+ for i_step in xrange(1,n_steps):
+ row = traj[i_step]
+ robot.SetDOFValues(row, arm_inds)
+ tf = ee_link.GetTransform()
+ pos = tf[:3,3]
+ pos_err = np.linalg.norm(poses[i_step][4:7] - pos)
+ pos_errs.append(pos_err)
+ pos_errs = np.array(pos_errs)
+
+ print "planned trajectory for %s. max position error: %.3f. all position errors: %s"%(manip_name, pos_errs.max(), pos_errs)
+
+ return traj
+
+
+
+
+#######################################
+###### Load demo from np files
+#######################################
+
+demo_keypts = np.load(osp.join(IROS_DATA_DIR, kpf + "/keypoints.npy"))
+
+def keyfunc(fname):
+ return int(osp.basename(fname).split("_")[0][3:]) # sort files with names like seg0_larm.npy
+
+lgrip_files, rgrip_files, larm_files, rarm_files = [sorted(glob(jtf + "/seg*%s.npy"%partname),
+ key = keyfunc)
+ for partname in ("lgrip", "rgrip", "larm", "rarm")]
+
+mini_segments = []
+
+for s in range(SEGNUM):
+ mini_segments.append( segment_trajectory( np.load(larm_files[s]),
+ np.load(rarm_files[s]),
+ np.load(lgrip_files[s]),
+ np.load(rgrip_files[s])))
+
+ print "trajectory segment", str(s), "broken into %i mini-segment(s) by gripper transitions"%len(mini_segments[s])
+
+#raw_input("press enter to execute")
+#######################################
+
+### iterate through each 'look' segment; for each segment:
+### click on key points
+### compare to key points from demonstration, do registration, find warping function
+### iterate through each mini-segment
+### downsample
+### find cartestian coordinates for trajectory
+### transform cartestian trajectory according to warping function
+### do ik to find new joint space trajectory (left arm first, then right arm)
+### execute new trajectory
+
+listener = ru.get_tf_listener()
+handles = []
+exec_keypts = {}
+
+if args.segment_index > 0: #HACK so we can start in the middle
+ start_segment_info = task_info[args.task][args.part_index]["segments"][args.segment_index]
+ if "left_end_effector" in start_segment_info:
+ needle_tip.SetTransform(robot.GetManipulator("leftarm").GetTransform())
+ robot.Grab(needle_tip)
+ demo_needle_tip.SetTransform(demo_robot.GetManipulator("leftarm").GetTransform())
+ demo_robot.Grab(demo_needle_tip)
+
+for s in range(args.segment_index, SEGNUM):
+
+ snapshot_count = 0
+ while True:
+ segment_info = task_info[args.task][args.part_index]["segments"][s]
+ keypt_names = segment_info["keypts_to_look_for"]
+ num_kps = len(keypt_names)
+
+ print colorize("trajectory segment %i"%s, 'blue', bold=True, highlight=True)
+
+ # keep track of keypts seen during each segment
+ exec_keypts[s] = {}
+ exec_keypts[s]["names"] = []
+ exec_keypts[s]["locations"] = []
+
+ # this is the frame whose trajectory we'll adapt to the new situation
+ # in some segments it's the needle tip
+ left_ee_linkname = segment_info.get("left_end_effector", "l_gripper_tool_frame")
+ right_ee_linkname = segment_info.get("right_end_effector", "r_gripper_tool_frame")
+
+ if left_ee_linkname == "needle_tip":
+ left_ee_link = needle_tip.GetLinks()[0]
+ demo_left_ee_link = demo_needle_tip
+ else:
+ left_ee_link = robot.GetLink(left_ee_linkname)
+ demo_left_ee_link = demo_robot.GetLink(left_ee_linkname)
+ if right_ee_linkname == "needle_tip":
+ right_ee_link = needle_tip.GetLinks()[0]
+ demo_right_ee_link = demo_needle_tip
+ else:
+ right_ee_link = robot.GetLink(right_ee_linkname)
+ demo_right_ee_link = demo_robot.GetLink(right_ee_linkname)
+
+ brett.update_rave()
+
+ print colorize("Key points from demo: %s"%keypt_names, 'green', bold=True)
+ time.sleep(1) #time.sleep or rospy.sleep??
+
+ #if args.cloud_topic == 'test':
+ #xyz_tf = np.load(pcf + '/seg%s_' + keypt_names[k] + '_xyz_tf.npy'%s)
+ #rgb_plot = np.load(pcf + '/seg%s_' + keypt_names[k] + '_rgb_pl.npy'%s)
+ #kp_loc = sc.find_kp(keypt_names[k], xyz_tf, rgb_plot, window_name)
+
+ if keypt_names[0] == 'tip_transform': # this is segment where robot looks for tip
+ demo_needle_tip_loc = np.load(osp.join(IROS_DATA_DIR, kpf, "seg%s_needle_world_loc.npy"%s))
+ exec_needle_tip_loc = sc.get_kp_locations(keypt_names, exec_keypts, s, args.cloud_topic)
+
+ exec_keypts[s]["locations"].append((0, 0, 0))
+
+ def grab_needle_tip(lr):
+ for demo in [False, True]:
+ if demo:
+ tip_loc = demo_needle_tip_loc
+ grabbing_robot = demo_robot
+ grabbed_needle_tip = demo_needle_tip
+ else:
+ tip_loc = exec_needle_tip_loc
+ grabbing_robot = robot
+ grabbed_needle_tip = needle_tip
+ grabbed_needle_tip.SetTransform(translation_matrix(tip_loc))
+ grabbing_robot.Grab(grabbed_needle_tip, grabbing_robot.GetLink("%s_gripper_tool_frame"%lr))
+
+ if "extra_info" in segment_info:
+ if "left_grab" in segment_info["extra_info"]:
+ grab_needle_tip('l')
+ elif "right_grab" in segment_info["extra_info"]:
+ grab_needle_tip('r')
+
+ else:
+ keypt_locs = sc.get_kp_locations(keypt_names, exec_keypts, s, args.cloud_topic)
+ exec_keypts[s]["locations"] = keypt_locs
+
+ for (n, name) in enumerate(keypt_names): exec_keypts[s]["names"].append(keypt_names[n])
+
+ rgbfile = glob(osp.join(IROS_DATA_DIR, args.task, 'point_clouds', 'pt%i/seg%i_*_rgb_*.npy'%(PARTNUM, s)))[0]
+ xyzfile = glob(osp.join(IROS_DATA_DIR, args.task, 'point_clouds', 'pt%i/seg%i_*_xyz_tf*.npy'%(PARTNUM, s)))[0]
+
+ if keypt_names[0] not in [ "needle_end", "needle_tip", "razor", "tip_transform"]:
+ np.savez(osp.join(IROS_DATA_DIR, "segment%.2i_snapshot%.2i_time%i"%(s,snapshot_count,int(time.time()))),
+ demo_rgb = np.load(rgbfile),
+ demo_xyz = np.load(xyzfile),
+ current_rgb = np.load("/tmp/rgb.npy"),
+ current_xyz = np.load("/tmp/xyz_tf.npy"),
+ keypts_names = keypt_names,
+ demo_keypts = demo_keypts[s],
+ exec_keypts = exec_keypts[s]["locations"]
+ )
+ snapshot_count += 1
+ if yes_or_no("done with snapshots?"):
+ print colorize("going on to next segment","red")
+ break
+ else:
+ print colorize("this segment doesn't have image keypoints. moving on","red")
+ break
+ print colorize("acquiring another snapshot", "blue")
+
+ demopoints_m3 = np.array(demo_keypts[s])
+ newpoints_m3 = np.array(exec_keypts[s]["locations"])
+
+ if args.mode in ["gazebo", "reality"]:
+ handles = []
+ pose_array = conversions.array_to_pose_array(demopoints_m3, 'base_footprint')
+ handles.append(rviz.draw_curve(pose_array, rgba = (1,0,0,1),width=.02,type=ru.Marker.CUBE_LIST))
+ pose_array = conversions.array_to_pose_array(newpoints_m3, 'base_footprint')
+ handles.append(rviz.draw_curve(pose_array, rgba = (0,0,1,1),width=.02,type=ru.Marker.CUBE_LIST))
+
+ from lfd import registration
+ f = registration.ThinPlateSpline()
+ #f.fit(demopoints_m3, newpoints_m3, 10,10)
+ f.fit(demopoints_m3, newpoints_m3, bend_coef=10,rot_coef=.01)
+ np.set_printoptions(precision=3)
+ print "nonlinear part", f.w_ng
+ print "affine part", f.lin_ag
+ print "translation part", f.trans_g
+ print "residual", f.transform_points(demopoints_m3) - newpoints_m3
+
+
+ for (i, mini_segment) in enumerate(mini_segments[s]):
+
+ brett.update_rave()
+
+ print colorize("mini-segment %i"%i, 'red', bold=True, highlight=True)
+
+ full_traj = np.c_[mini_segment.larm_traj, mini_segment.rarm_traj]
+ full_traj = mu.remove_duplicate_rows(full_traj)
+ orig_times = np.arange(len(full_traj))
+
+ ### downsample the trajectory
+ ds_times, ds_traj = adaptive_resample(full_traj, tol=.01, max_change=.1) # about 2.5 degrees, 10 degrees
+ n_steps = len(ds_traj)
+
+ ################################################
+ ### This part gets the cartesian trajectory
+ ################################################
+
+ robot.SetActiveDOFs(np.r_[robot.GetManipulator("leftarm").GetArmIndices(), robot.GetManipulator("rightarm").GetArmIndices()])
+ demo_robot.SetActiveDOFs(np.r_[robot.GetManipulator("leftarm").GetArmIndices(), robot.GetManipulator("rightarm").GetArmIndices()])
+
+ demo_robot.SetDOFValues(robot.GetDOFValues())
+
+ # let's get cartesian trajectory
+ left_hmats = []
+ right_hmats = []
+
+
+ for row in ds_traj:
+ demo_robot.SetActiveDOFValues(row)
+ left_hmats.append(demo_left_ee_link.GetTransform())
+ right_hmats.append(demo_right_ee_link.GetTransform())
+
+
+ left_hmats_old = left_hmats
+ left_hmats = transform_hmats(f, left_hmats)
+ right_hmats_old = right_hmats
+ right_hmats = transform_hmats(f, right_hmats)
+
+
+ if args.mode in ["gazebo", "reality"]:
+ pose_array = conversions.array_to_pose_array(np.array(left_hmats_old)[:,:3,3], 'base_footprint')
+ handles.append(rviz.draw_curve(pose_array, rgba = (1,0,0,1),width=.005,type=ru.Marker.LINE_STRIP))
+ pose_array = conversions.array_to_pose_array(np.array(right_hmats_old)[:,:3,3], 'base_footprint')
+ handles.append(rviz.draw_curve(pose_array, rgba = (1,0,0,1),width=.005,type=ru.Marker.LINE_STRIP))
+
+ pose_array = conversions.array_to_pose_array(np.array(left_hmats)[:,:3,3], 'base_footprint')
+ handles.append(rviz.draw_curve(pose_array, rgba = (0,0,1,1),width=.005,type=ru.Marker.LINE_STRIP))
+ pose_array = conversions.array_to_pose_array(np.array(right_hmats)[:,:3,3], 'base_footprint')
+ handles.append(rviz.draw_curve(pose_array, rgba = (0,0,1,1),width=.005,type=ru.Marker.LINE_STRIP))
+
+
+ ################################################
+
+ leftarm_inds = robot.GetManipulator("leftarm").GetArmIndices()
+ rightarm_inds = robot.GetManipulator("rightarm").GetArmIndices()
+
def remove_winding(arm_traj, current_arm_vals):
    """Shift the continuous joints of arm_traj by whole turns toward the robot's current config.

    Joints 2, 4, 6 of the PR2 arm are continuous, so a recorded demo
    trajectory may sit a multiple of 2*pi away from where the robot's
    joints currently are; add the nearest whole number of turns so
    execution does not needlessly spin those joints.

    arm_traj: (n_steps, 7) joint trajectory (not modified).
    current_arm_vals: length-7 current joint values.
    Returns a shifted copy of arm_traj.
    """
    arm_traj = arm_traj.copy()
    for i in [2, 4, 6]:  # continuous (roll) joints on the PR2 arm
        dof_jump = current_arm_vals[i] - arm_traj[0, i]
        winds = np.round(dof_jump / (2 * np.pi))
        # BUG FIX: report any nonzero unwinding; the old check (winds > 0)
        # stayed silent for negative winds even though the trajectory was
        # still being shifted.
        if winds != 0: print("unwound joint %i by 2pi*%i" % (i, winds))
        arm_traj[:, i] += winds * 2 * np.pi
    return arm_traj
+
+
+ seg_arms = task_info[args.task][args.part_index]["segments"][s]["mini-segments"][i]["arms_used"]
+
+ if 'l' in seg_arms:
+ best_left_path = plan_follow_traj(robot, "leftarm", left_ee_link, left_hmats, remove_winding(ds_traj[:,:7], robot.GetDOFValues(leftarm_inds)))
+ else:
+ best_left_path = ds_traj[:,:7]
+ if 'r' in seg_arms:
+ best_right_path = plan_follow_traj(robot, "rightarm", right_ee_link, right_hmats, remove_winding(ds_traj[:,7:], robot.GetDOFValues(rightarm_inds)), "leftarm", best_left_path)
+ else:
+ best_right_path = ds_traj[:,7:]
+
+
+ left_diffs = np.abs(best_left_path[1:] - best_left_path[:-1])
+ right_diffs = np.abs(best_right_path[1:] - best_right_path[:-1])
+ print "max joint discontinuities in left arm:", left_diffs.max(), "per joint: ", left_diffs.max(axis=0)
+ print "max joint discontinuities in right arm:", right_diffs.max(), "per joint: ", right_diffs.max(axis=0)
+
+ print(colorize("look at markers in rviz. red=demo, blue=new. press enter to continue","red"))
+ raw_input()
+
+ ######################################
+ ### Now view/execute the trajectory
+ ######################################
+
+ if args.mode == "openrave":
+ viewer = trajoptpy.GetViewer(env)
+ joint_traj = np.c_[best_left_path, best_right_path]
+ # *5 factor is due to an openrave oddity
+ robot.SetDOFValues([mini_segment.lgrip_angle*5, mini_segment.rgrip_angle*5], [robot.GetJoint("l_gripper_l_finger_joint").GetDOFIndex(), robot.GetJoint("r_gripper_l_finger_joint").GetDOFIndex()], False)
+ for (i,row) in enumerate(joint_traj):
+ print "step",i
+ robot.SetActiveDOFValues(row)
+ lhandle = env.drawarrow(left_ee_link.GetTransform()[:3,3], left_hmats[i][:3,3])
+ rhandle = env.drawarrow(right_ee_link.GetTransform()[:3,3], right_hmats[i][:3,3])
+ viewer.Idle()
+ else:
+ from brett2 import trajectories
+ #def follow_body_traj2(pr2, bodypart2traj, times=None, wait=True, base_frame = "/base_footprint"):
+ bodypart2traj = {}
+ brett.lgrip.set_angle(mini_segment.lgrip_angle)
+ brett.rgrip.set_angle(mini_segment.rgrip_angle)
+ brett.join_all()
+ sleep(.4)
+ if 'l' in seg_arms:
+ bodypart2traj["l_arm"] = best_left_path
+ else:
+ print colorize("skipping left arm", 'yellow', bold=True, highlight=True)
+ if 'r' in seg_arms:
+ bodypart2traj["r_arm"] = best_right_path
+ else:
+ print colorize("skipping right arm", 'yellow', bold=True, highlight=True)
+
+ if args.no_movement:
+ print colorize("skipping arm movement","yellow",bold=True,highlight=True)
+ else:
+ trajectories.follow_body_traj2(brett, bodypart2traj, speed_factor=.5)
\ No newline at end of file
diff --git a/iros/feature_utils.py b/iros/feature_utils.py
new file mode 100644
index 0000000..5f250ef
--- /dev/null
+++ b/iros/feature_utils.py
@@ -0,0 +1,237 @@
+import cv
+import cv2
+import math
+import numpy as np
+import os
+import os.path as osp
+import scipy.ndimage as ndi
+#from jds_image_proc.pcd_io import load_xyzrgb
+
def automatic_find_holes(img, task):
    """Detect suture-pad holes in a BGR image and group them into pairs.

    Runs the MSER blob detector on a grayscale copy, keeps non-overlapping
    blobs of plausible hole size within a fixed column band, sorts them
    bottom-to-top, and pairs them horizontally (and diagonally across
    consecutive rows). Shows debug windows, draws the detected circles on
    img (in place), and blocks on raw_input before returning.

    img: BGR image; modified in place by the debug drawing.
    task: 'InterruptedSuture' -> returns the list of horizontal pairs,
          'RunningSuture'     -> returns the list of diagonal pairs,
          anything else       -> None.
    """
    #xyz, rgb = load_xyzrgb("suture_scene(holes).pcd")
    #img = rgb.copy()

    # accepted MSER blob size range, in pixels (hole-sized blobs only)
    SIZEMIN = 21
    SIZEMAX = 33

    # only blobs whose center x lies in this column band are considered
    # NOTE(review): these bounds look camera/setup specific -- confirm
    COLMIN = 200
    COLMAX = 500

    d_red = cv2.cv.RGB(150, 55, 65)
    l_red = cv2.cv.RGB(250, 200, 200)   # NOTE(review): unused below
    d_blue = cv2.cv.RGB(40, 55, 200)

    cv2.namedWindow("rgb")
    cv2.imshow("rgb", img)
    cv2.waitKey(10)

    gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    detector = cv2.FeatureDetector_create('MSER')
    fs = detector.detect(gray_img)
    fs.sort(key = lambda x: -x.size)    # biggest blobs first

    def supress(x):
        # True when x lies within a strictly bigger blob (non-max
        # suppression); falls through to None (falsy) otherwise.
        for f in fs:
            distx = f.pt[0] - x.pt[0]
            disty = f.pt[1] - x.pt[1]
            dist = math.sqrt(distx*distx + disty*disty)
            if (f.size > x.size) and (dist < f.size/2):
                return True

    def fil(f):
        # True when the blob is hole-sized and inside the column band;
        # falls through to None (falsy) otherwise.
        if (f.size > SIZEMIN) and (f.size < SIZEMAX) and (f.pt[0] > COLMIN) and (f.pt[0] < COLMAX):
            return True

    sfs = [x for x in fs if not supress(x)]
    circles = [c for c in sfs if fil(c)]
    circles.sort(key = lambda c: c.pt[1], reverse=True)   # bottom of the image first

    # consecutive circles (in row order) form one horizontal pair; the right
    # circle of each pair plus the left circle of the next row forms one
    # diagonal pair.
    horiz_pairs = []
    diag_pairs = []
    for i in range(len(circles)/2):
        horiz_pairs.append([])
        horiz_pairs[i].append(circles[2*i])
        horiz_pairs[i].append(circles[(2*i)+1])

    for i in range(len(horiz_pairs)):
        horiz_pairs[i].sort(key = lambda c: c.pt[0])    # left circle first

    for i in range(len(horiz_pairs)-1):
        diag_pairs.append([])
        diag_pairs[i].append(horiz_pairs[i][1])
        diag_pairs[i].append(horiz_pairs[i+1][0])

    print 'num circles', len(circles)
    print 'num horiz_pairs', len(horiz_pairs)
    print 'num diag_pairs', len(diag_pairs)

    cv2.namedWindow("rgb with circles")
    cv2.imshow("rgb with circles", img)
    cv2.waitKey(100)

    # draw every paired circle so the operator can confirm the detection
    for p in range(len(horiz_pairs)):
        for c in horiz_pairs[p]:
            cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_red, 2, cv2.CV_AA)
            cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_blue, 1, cv2.CV_AA)
            cv2.imshow("rgb with circles", img)
            cv2.waitKey(10)
            #raw_input("Press enter for the next circle...")

    #d = 0
    #for p in range(len(diag_pairs)):
    #    for c in diag_pairs[p]:
    #        d += 1
    #        cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_red, 2, cv2.CV_AA)
    #        cv2.circle(img, (int(c.pt[0]), int(c.pt[1])), int(c.size/2), d_blue, 1, cv2.CV_AA)
    #        cv2.imshow("rgb with circles", img)
    #        cv2.waitKey(10)
    #        raw_input("Circle %i. Press enter to continue..."%d)

    cv2.imshow("rgb with circles", img)
    cv2.waitKey(100)
    raw_input("Press enter to finish...")   # block so the operator can inspect
    cv2.destroyAllWindows()

    if task == 'InterruptedSuture':
        return horiz_pairs
    elif task == 'RunningSuture':
        return diag_pairs
    else:
        return None
+
+
+
def automatic_find_cut(img):
    """Find a vertical cut line in a BGR image via Canny edges + Hough lines.

    Draws the detected lines on img (in place) and shows debug windows.
    Returns the first perfectly vertical line segment as [x1, y1, x2, y2],
    or None when no lines / no vertical line are found.
    """
    gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    edges = cv2.Canny(gray_img, 80, 240)
    cv2.imshow("edges", edges)
    cv2.waitKey(100)

    lines = cv2.HoughLinesP(edges, 1, cv.CV_PI/360, 1, minLineLength = 50, maxLineGap = 5)
    # BUG FIX: HoughLinesP returns None when nothing is detected; indexing
    # lines[0] would raise TypeError.
    if lines is None:
        return None

    # BUG FIX: the drawing color was referenced as an undefined name `clr`
    # (NameError on the first detected line); define it (BGR green).
    clr = (0, 255, 0)

    # a segment is exactly vertical when its horizontal extent is zero
    vert_lines = [l for l in lines[0] if (math.atan2(abs(l[0] - l[2]) , abs(l[1] - l[3])) == 0.0)]

    for l in vert_lines:
        cv2.line(img, (l[0], l[1]), (l[2], l[3]), clr, 2)

    for l in lines[0]:
        cv2.line(img, (l[0], l[1]), (l[2], l[3]), clr, 2)

    cv2.imshow("rgb with all lines", img)
    cv2.waitKey(100)

    # BUG FIX: was `vert_line[0]` (undefined name); also guard emptiness.
    if not vert_lines:
        return None
    cut_line = vert_lines[0]

    return cut_line
+
+
#########################################
### feature matching
#########################################
# Script body: load two recordings of the same task part/segment, then
# compare human-labeled keypoints against automatic image registration.
from glob import glob

# which part/segment of the demos to compare
PART_NUM = 0
SEG_NUM = 2

IROS_DATA_DIR = os.getenv("IROS_DATA_DIR")
task1 = 'InterruptedSuture6'
task2 = 'InterruptedSuture7'
# first matching rgb image for each demo
pcf1 = glob(osp.join(IROS_DATA_DIR, 'point_clouds', task1, 'pt%iseg%i_*_rgb_*.npy'%(PART_NUM, SEG_NUM)))[0]
pcf2 = glob(osp.join(IROS_DATA_DIR, 'point_clouds', task2, 'pt%iseg%i_*_rgb_*.npy'%(PART_NUM, SEG_NUM)))[0]

# matching transformed organized point clouds
xyzfile1 = glob(osp.join(IROS_DATA_DIR, 'point_clouds', task1, 'pt%iseg%i_*_xyz_tf*.npy'%(PART_NUM, SEG_NUM)))[0]
xyzfile2 = glob(osp.join(IROS_DATA_DIR, 'point_clouds', task2, 'pt%iseg%i_*_xyz_tf*.npy'%(PART_NUM, SEG_NUM)))[0]

kpf1 = osp.join(IROS_DATA_DIR, 'key_points', task1)
kpf2 = osp.join(IROS_DATA_DIR, 'key_points', task2)

#xyz1, rgb1 = load_xyzrgb("suture_scene0.pcd")
#xyz2, rgb2 = load_xyzrgb("suture_scene1.pcd")

rgb1 = np.load(pcf1)
rgb2 = np.load(pcf2)
xyz1 = np.load(xyzfile1)
xyz2 = np.load(xyzfile2)
# some recordings are stacks of frames; keep only the first frame
if xyz1.ndim == 4: xyz1 = xyz1[0]
if xyz2.ndim == 4: xyz2 = xyz2[0]
if rgb1.ndim == 4: rgb1 = rgb1[0]
if rgb2.ndim == 4: rgb2 = rgb2[0]

# human-labeled 3D keypoints (and their names) for each demo part
kpts1 = np.load(kpf1 + '/pt%i_keypoints.npy'%PART_NUM)
kpts2 = np.load(kpf2 + '/pt%i_keypoints.npy'%PART_NUM)
kpts_names1 = np.load(kpf1 + '/pt%i_keypoints_names.npy'%PART_NUM)
kpts_names2 = np.load(kpf2 + '/pt%i_keypoints_names.npy'%PART_NUM)

# keypoints of the chosen segment
kps1 = kpts1[SEG_NUM]
kps2 = kpts2[SEG_NUM]

print 'rgb1 kps', kps1
print 'rgb2 kps', kps2
+
def xyz2rc(xyz, xyz_img):
    """Find the (row, col) pixel of xyz_img whose 3D point lies closest to xyz.

    xyz_img is an organized (H, W, 3) point cloud; comparison is by squared
    Euclidean distance, with NaN (invalid-depth) pixels excluded. Prints a
    warning when the nearest point is farther than 1e-4 in squared distance.
    """
    sqdists_rc = ((xyz_img - xyz) ** 2).sum(axis=2)
    # invalid cloud points must never win the argmin
    sqdists_rc[np.isnan(sqdists_rc)] = 1e100
    mindist = sqdists_rc.min()
    if mindist > 1e-4:
        print("warning: nonzero minimum distance: %s" % mindist)
    return np.unravel_index(sqdists_rc.argmin(), sqdists_rc.shape)
+
+
# drawing colors, BGR order
RED = (0,0,255)
GREEN = (0,255,0)
BLUE = (255,0,0)
YELLOW = (0,255,255)

# project each labeled 3D keypoint back to (row, col) pixel coordinates
rcs1 = [xyz2rc(xyz, xyz1) for xyz in kps1]
rcs2 = [xyz2rc(xyz, xyz2) for xyz in kps2]

font = cv.InitFont(cv.CV_FONT_HERSHEY_SIMPLEX, 1, 1, 0, 3, 8)   # NOTE(review): unused below

rgb1_plot = rgb1.copy()
rgb2_plot = rgb2.copy()

import image_registration as ir

# globally-consistent prediction of all keypoints in image 2 (NCC + DP);
# register_images expects (col, row) = (x, y) coordinates
pred_rcs2 = ir.register_images(rgb1, rgb2, [(c,r) for (r,c) in rcs1])

for (i_kp, (r,c)) in enumerate(rcs1):
    # red on image 1: where the keypoint was labeled
    cv2.putText(rgb1_plot, str(i_kp), (c,r), cv2.FONT_HERSHEY_PLAIN, 1.0, RED, thickness = 2)

    # best single NCC match (local prediction, no global consistency)
    _xys, _vs = ir.get_matches(rgb1, (c,r), rgb2, max_num_matches=1)
    (cpredlocal, rpredlocal) = _xys[0]
    rgroundtruth, cgroundtruth = rcs2[i_kp]

    # on image 2: red = image-1 position, blue = human label, green = local NCC
    cv2.putText(rgb2_plot, str(i_kp), (c,r), cv2.FONT_HERSHEY_PLAIN, 1.0, RED, thickness = 2)
    cv2.putText(rgb2_plot, str(i_kp), (cgroundtruth,rgroundtruth), cv2.FONT_HERSHEY_PLAIN, 1.0, BLUE, thickness = 2)
    cv2.putText(rgb2_plot, str(i_kp), (cpredlocal,rpredlocal), cv2.FONT_HERSHEY_PLAIN, 1.0, GREEN, thickness = 2)

    # yellow = globally-consistent (NCC + DP) prediction
    cpredglobal, rpredglobal = pred_rcs2[i_kp]
    cv2.putText(rgb2_plot, str(i_kp), (cpredglobal,rpredglobal), cv2.FONT_HERSHEY_PLAIN, 1.0, YELLOW, thickness = 2)

cv2.imshow("rgb1", rgb1_plot)
cv2.waitKey(100)

cv2.imshow("rgb2", rgb2_plot)
cv2.waitKey(100)

cv2.startWindowThread()

print """
Red: original coordinates of keypoints in rgb1
Blue: ground truth (human labeling) of keypoints
Green: keypoint prediction using only NCC (local)
Yellow: keypoint prediction using NCC + dynamic programming
"""
diff --git a/iros/follow_pose_traj.py b/iros/follow_pose_traj.py
new file mode 100644
index 0000000..3e6b3f8
--- /dev/null
+++ b/iros/follow_pose_traj.py
@@ -0,0 +1,331 @@
# Command-line interface: choose where trajectories are visualized/executed.
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["openrave", "gazebo", "reality"])
args = parser.parse_args()

import trajoptpy
import openravepy
from openravepy import planningutils
import numpy as np
import json
import trajoptpy.math_utils as mu
import trajoptpy.kin_utils as ku
import trajoptpy.make_kinbodies as mk

import functools as ft

import os
import os.path as osp
from glob import glob

#########################
## Set up
#########################
# Build the robot interface for the chosen mode:
# - openrave: purely simulated environment with a static PR2 model
# - gazebo/reality: live ROS PR2 through the brett2 wrapper

if args.mode == "openrave":
    env = openravepy.Environment()
    env.StopSimulation()
    env.Load("robots/pr2-beta-static.zae")
    import lfd
    env.Load(osp.join(osp.dirname(lfd.__file__), "data/table2.xml"))
    robot = env.GetRobots()[0]
    # raise the torso to its upper limit to match the execution posture
    torso_joint = robot.GetJoint("torso_lift_joint")
    robot.SetDOFValues(torso_joint.GetLimits()[1], [torso_joint.GetDOFIndex()])
else:
    import rospy
    from brett2.PR2 import PR2
    rospy.init_node("follow_pose_traj",disable_signals=True)
    brett = PR2()
    env = brett.env
    robot = brett.robot
    if args.mode == "gazebo":
        brett.torso.go_up()
        rospy.sleep(1)
    brett.update_rave()

    # NOTE(review): reading the table bounds from the ROS parameter server
    # is disabled; both gazebo and reality load the static table model.
    if False:#args.mode == "reality":
        table_bounds = map(float, rospy.get_param("table_bounds").split())
        mk.create_box_from_bounds(env,table_bounds, name="table")
    else:
        import lfd
        env.Load(osp.join(osp.dirname(lfd.__file__), "data/table2.xml"))
+
+#######################
def retime_traj(traj, robot):
    """Retime a joint-space path with OpenRAVE's parabolic retimer.

    traj: (n, dof) array of waypoints for the robot's active DOFs.
    Uses the module-level env. Returns (new_traj, times): the retimed
    waypoints (first 14 values each) and the cumulative timestamp of each
    waypoint.
    """
    traj_obj = openravepy.RaveCreateTrajectory(env,'')
    traj_obj.Init(robot.GetActiveConfigurationSpecification())
    for row in traj:
        # BUG FIX: Insert(0, row) inserted every waypoint at the FRONT,
        # building the trajectory in reverse order; append at the end.
        traj_obj.Insert(traj_obj.GetNumWaypoints(), row)
    planningutils.RetimeActiveDOFTrajectory(traj_obj,robot,hastimestamps=False,maxvelmult=1,maxaccelmult=1,plannername='ParabolicTrajectoryRetimer')
    new_traj = []
    durations = []
    # after retiming, each waypoint's last field is the delta-time to reach it
    for i in xrange(traj_obj.GetNumWaypoints()):
        new_traj.append(traj_obj.GetWaypoint(i)[:14])
        durations.append(traj_obj.GetWaypoint(i)[-1])
    return np.array(new_traj), np.cumsum(durations)
+
def adaptive_resample(x, tol, max_change=None, min_steps=3):
    """
    resample the original signal with a small number of waypoints so that the sparsely sampled function,
    when linearly interpolated, deviates from the original function by less than tol at every time

    input:
    x: 2D array in R^(t x k) where t is the number of timesteps
    tol: tolerance. either a single scalar or a vector of length k
    max_change: max change in the sparsely sampled signal at each timestep
    min_steps: minimum number of timesteps in the new trajectory. (usually irrelevant)

    output:
    new_times, new_x

    assuming that the old signal has times 0,1,2,...,len(x)-1
    this gives the new times, and the new signal
    """
    x = np.asarray(x)
    assert x.ndim == 2

    # broadcast scalar tolerances / change limits to one entry per channel
    if np.isscalar(tol):
        tol = np.ones(x.shape[1])*tol
    else:
        tol = np.asarray(tol)
        assert tol.ndim == 1 and tol.shape[0] == x.shape[1]

    times = np.arange(x.shape[0])

    if max_change is None:
        max_change = np.ones(x.shape[1]) * np.inf
    elif np.isscalar(max_change):
        max_change = np.ones(x.shape[1]) * max_change
    else:
        max_change = np.asarray(max_change)
        assert max_change.ndim == 1 and max_change.shape[0] == x.shape[1]

    # l: cumulative arc length along the dense signal; the sparse signal is
    # parameterized by it
    dl = mu.norms(x[1:] - x[:-1],1)
    l = np.cumsum(np.r_[0,dl])

    def bad_inds(x1, l1):
        # BUG FIX: this parameter used to be named t1 and was ignored -- the
        # body silently read the enclosing loop's l1, which only worked
        # because the caller happened to pass l1. Name it l1 so the function
        # actually uses its argument.
        # dense samples where linear interpolation misses by more than tol:
        ibad = np.flatnonzero( (np.abs(mu.interp2d(l, l1, x1) - x) > tol).any(axis=1) )
        # sparse intervals whose per-step change exceeds max_change:
        jbad1 = np.flatnonzero((np.abs(x1[1:] - x1[:-1]) > max_change[None,:]).any(axis=1))
        if len(ibad) == 0 and len(jbad1) == 0: return []
        else:
            lbad = l[ibad]
            # sparse intervals containing the offending dense samples
            jbad = np.unique(np.searchsorted(l1, lbad)) - 1
            jbad = np.union1d(jbad, jbad1)
            return jbad

    l1 = np.linspace(0,l[-1],min_steps)
    for _ in xrange(20):   # each pass bisects the offending intervals
        x1 = mu.interp2d(l1, l, x)
        bi = bad_inds(x1, l1)
        if len(bi) == 0:
            return np.interp(l1, l, times), x1
        else:
            l1 = np.union1d(l1, (l1[bi] + l1[bi+1]) / 2 )

    raise Exception("couldn't subdivide enough. something funny is going on. check your input data")
+
+
def plan_follow_traj(robot, manip_name, new_hmats, old_traj):
    """Use trajopt to plan a joint trajectory whose gripper tracks new_hmats.

    robot: OpenRAVE robot (the module-level env is passed to the solver).
    manip_name: "leftarm" or "rightarm".
    new_hmats: list of n_steps 4x4 target poses for the gripper tool frame.
    old_traj: (n_steps, 7) joint trajectory used to seed the optimization.
    Returns the optimized (n_steps, 7) trajectory; asserts that it respects
    the lower joint limits.
    """
    n_steps = len(new_hmats)
    assert old_traj.shape[0] == n_steps
    assert old_traj.shape[1] == 7

    # seed with the demo path, but start from the robot's actual config
    init_traj = old_traj.copy()
    init_traj[0] = robot.GetDOFValues(robot.GetManipulator(manip_name).GetArmIndices())

    request = {
        "basic_info" : {
            "n_steps" : n_steps,
            "manip" : manip_name,
            "start_fixed" : True   # first waypoint pinned to current config
        },
        "costs" : [
        {
            "type" : "joint_vel",
            "params": {"coeffs" : [1]}
        },
        {
            "type" : "collision",
            "params" : {"coeffs" : [10],"dist_pen" : [0.025]}
        }
        ],
        "constraints" : [
        ],
        "init_info" : {
            "type":"given_traj",
            "data":[x.tolist() for x in init_traj]
        }
    }

    # one pose cost per timestep keeps the gripper on the demo path
    poses = [openravepy.poseFromMatrix(hmat) for hmat in new_hmats]
    for (i_step,pose) in enumerate(poses):
        request["costs"].append(
            {"type":"pose",
             "params":{
                "xyz":pose[4:7].tolist(),
                "wxyz":pose[0:4].tolist(),
                "link":"%s_gripper_tool_frame"%manip_name[0],
                "timestep":i_step
             }
            })

    s = json.dumps(request)
    prob = trajoptpy.ConstructProblem(s, env) # create object that stores optimization problem
    result = trajoptpy.OptimizeProblem(prob) # do optimization
    traj = result.GetTraj()

    # sanity check on the answer
    # NOTE(review): only the LOWER joint limits are asserted here -- confirm
    # whether the upper limits should be checked as well
    assert (robot.GetDOFLimits()[0][robot.GetManipulator(manip_name).GetArmIndices()] <= traj.min(axis=0)).all()
    return traj
+
+###################################
+###### Load demonstration files
+###################################
+
+
# Locate the recorded demo trajectories for one task part: one sorted file
# list per body part (gripper angles and arm joint angles, both sides).
IROS_DATA_DIR = os.getenv("IROS_DATA_DIR")
PARTNUM = 0
SEGMENT_NUM = 0
def keyfunc(fname): return int(osp.basename(fname).split("_")[0][3:]) # numeric sort key: "seg12_larm.npy" -> 12
lgrip_files, rgrip_files, larm_files, rarm_files = [sorted(glob(osp.join(IROS_DATA_DIR, "InterruptedSuture0/joint_trajectories/pt%i/seg*_%s.npy"%(PARTNUM, partname))),
                                                           key = keyfunc)
                                                   for partname in ("lgrip", "rgrip", "larm", "rarm")]
+
+
+######################################
+
from collections import namedtuple
# A gripper-transition-free chunk of the demo: joint trajectories for both
# arms plus the (constant) gripper angle to hold during the chunk.
TrajSegment = namedtuple("TrajSegment", "larm_traj rarm_traj lgrip_angle rgrip_angle") # class to describe trajectory segments

OPEN_ANGLE = .08     # commanded gripper angle for "open"
CLOSED_ANGLE = 0     # commanded gripper angle for "closed"


def segment_trajectory(larm, rarm, lgrip, rgrip):
    """Split a demo into segments delimited by gripper open/close transitions.

    larm, rarm: (n_steps, 7) joint trajectories for the two arms.
    lgrip, rgrip: (n_steps,) gripper angle trajectories.
    Returns a list of TrajSegment; each segment carries the arm trajectories
    between two transitions and the binarized gripper angles at its start.
    """
    thresh = .04  # angle above which the gripper is considered "open"

    n_steps = len(larm)
    assert len(rarm)==n_steps
    assert len(lgrip)==n_steps
    assert len(rgrip)==n_steps

    # indices BEFORE a transition occurs
    l_openings = np.flatnonzero((lgrip[1:] >= thresh) & (lgrip[:-1] < thresh))
    r_openings = np.flatnonzero((rgrip[1:] >= thresh) & (rgrip[:-1] < thresh))
    l_closings = np.flatnonzero((lgrip[1:] < thresh) & (lgrip[:-1] >= thresh))
    r_closings = np.flatnonzero((rgrip[1:] < thresh) & (rgrip[:-1] >= thresh))

    before_transitions = np.r_[l_openings, r_openings, l_closings, r_closings]
    after_transitions = before_transitions+1
    # BUG FIX: np.unique1d was deprecated in NumPy 1.4 and removed in 1.9;
    # np.unique is the drop-in replacement (also returns sorted indices).
    seg_starts = np.unique(np.r_[0, after_transitions])
    seg_ends = np.unique(np.r_[before_transitions, n_steps])

    def binarize_gripper(angle):
        # snap a measured angle to the open/closed command value
        if angle > thresh: return OPEN_ANGLE
        else: return CLOSED_ANGLE

    traj_segments = []
    for (i_start, i_end) in zip(seg_starts, seg_ends):
        l_angle = binarize_gripper(lgrip[i_start])
        r_angle = binarize_gripper(rgrip[i_start])
        traj_segments.append(TrajSegment( larm[i_start:i_end], rarm[i_start:i_end], l_angle, r_angle))
    return traj_segments
+
# split the selected demo recording at every gripper open/close event
segments = segment_trajectory(
    np.load(larm_files[SEGMENT_NUM]),
    np.load(rarm_files[SEGMENT_NUM]),
    np.load(lgrip_files[SEGMENT_NUM]),
    np.load(rgrip_files[SEGMENT_NUM]))

print "trajectory broken into %i segments by gripper transitions"%len(segments)

# start the (rave) robot at the demo's initial arm configuration
robot.SetDOFValues(segments[0].larm_traj[0], robot.GetManipulator("leftarm").GetArmIndices())
robot.SetDOFValues(segments[0].rarm_traj[0], robot.GetManipulator("rightarm").GetArmIndices())
+
# Main loop: for each gripper-delimited segment, downsample the demo joint
# trajectory, read off the corresponding gripper pose trajectory, re-plan it
# with trajopt, then either visualize (openrave) or execute (gazebo/reality).
for (i,segment) in enumerate(segments):
    print "trajectory segment %i"%i

    full_traj = np.c_[segment.larm_traj, segment.rarm_traj]
    full_traj = mu.remove_duplicate_rows(full_traj)
    orig_times = np.arange(len(full_traj))
    ds_times, ds_traj = adaptive_resample(full_traj, tol=.01, max_change=.1) # about 2.5 degrees, 10 degrees
    n_steps = len(ds_traj)

    ####################
    ### This part just gets the cartesian trajectory
    ####################

    robot.SetActiveDOFs(np.r_[robot.GetManipulator("leftarm").GetArmIndices(), robot.GetManipulator("rightarm").GetArmIndices()])
    # let's get cartesian trajectory
    left_hmats = []
    right_hmats = []
    # replay the downsampled joint path to read off the tool-frame poses,
    # restoring the robot state afterwards
    saver = openravepy.RobotStateSaver(robot)
    for row in ds_traj:
        robot.SetActiveDOFValues(row)
        left_hmats.append(robot.GetLink("l_gripper_tool_frame").GetTransform())
        right_hmats.append(robot.GetLink("r_gripper_tool_frame").GetTransform())
    saver.Restore()
    saver.Release()

    # now let's shift it a little
    # NOTE(review): hard-coded 2 cm shift along -x of the base frame --
    # presumably a test perturbation; confirm before real execution
    for i in xrange(n_steps):
        left_hmats[i][0,3] -= .02
        right_hmats[i][0,3] -= .02

    ###################

    trajoptpy.SetInteractive(False)

    best_left_path = plan_follow_traj(robot, "leftarm", left_hmats, ds_traj[:,:7])
    best_right_path = plan_follow_traj(robot, "rightarm", right_hmats, ds_traj[:,7:])

    # large jumps here mean the planner produced a discontinuous path
    left_diffs = np.abs(best_left_path[1:] - best_left_path[:-1])
    right_diffs = np.abs(best_right_path[1:] - best_right_path[:-1])
    print "max joint discontinuities in left arm:", left_diffs.max(), "per joint: ", left_diffs.max(axis=0)
    print "max joint discontinuities in right arm:", right_diffs.max(), "per joint: ", right_diffs.max(axis=0)

    ##################################
    #### Now view/execute the trajectory
    ##################################

    if args.mode == "openrave":
        robot.SetActiveDOFs(np.r_[robot.GetManipulator("leftarm").GetArmIndices(), robot.GetManipulator("rightarm").GetArmIndices()])
        viewer = trajoptpy.GetViewer(env)
        joint_traj = np.c_[best_left_path, best_right_path]
        # NOTE(review): the retimed result is computed but the loop below
        # steps joint_traj directly -- confirm whether joints/times should
        # actually be used here
        joints, times = retime_traj(joint_traj,robot)
        # *5 factor is due to an openrave oddity
        robot.SetDOFValues([segment.lgrip_angle*5, segment.rgrip_angle*5], [robot.GetJoint("l_gripper_l_finger_joint").GetDOFIndex(), robot.GetJoint("r_gripper_l_finger_joint").GetDOFIndex()], False)
        # NOTE(review): this loop variable i shadows the segment index above
        for (i,row) in enumerate(joint_traj):
            print "step",i
            robot.SetActiveDOFValues(row)
            # arrows from the achieved gripper positions to the targets
            lhandle = env.drawarrow(robot.GetLink("l_gripper_tool_frame").GetTransform()[:3,3], left_hmats[i][:3,3])
            rhandle = env.drawarrow(robot.GetLink("r_gripper_tool_frame").GetTransform()[:3,3], right_hmats[i][:3,3])
            viewer.Idle()
    else:
        from brett2 import trajectories
        #def follow_body_traj2(pr2, bodypart2traj, times=None, wait=True, base_frame = "/base_footprint"):
        bodypart2traj = {}
        # command the segment's gripper state first, then stream both arms
        brett.lgrip.set_angle(segment.lgrip_angle)
        brett.rgrip.set_angle(segment.rgrip_angle)
        brett.join_all()
        # NOTE(review): the RAW demo arm trajectories are executed here, not
        # the replanned best_*_path computed above -- confirm intent
        bodypart2traj["l_arm"] = segment.larm_traj
        bodypart2traj["r_arm"] = segment.rarm_traj
        trajectories.follow_body_traj2(brett, bodypart2traj)
+
diff --git a/iros/image_registration.py b/iros/image_registration.py
new file mode 100644
index 0000000..061a615
--- /dev/null
+++ b/iros/image_registration.py
@@ -0,0 +1,181 @@
+import numpy as np
+import cv2
+import scipy.ndimage as ndi
+import itertools as it
+import scipy.spatial.distance as ssd
+import jds_utils.shortest_paths as sp
+import jds_utils.math_utils as mu
+
# set True to pop up debug windows showing the candidate template matches
DEBUG_PLOTTING = False
PLOT_COUNTER = 0   # suffix giving each debug window a unique name
class Colors:
    # drawing colors in OpenCV's BGR order
    RED = (0,0,255)
    GREEN = (0,255,0)
    BLUE = (255,0,0)
+
+
def register_images(src_img, targ_img, src_xys_n):
    """Locate each source keypoint in targ_img.

    For every (col, row) point in src_xys_n, template matching proposes a
    set of candidate locations; a dynamic-programming pass over consecutive
    keypoint triples then picks the globally most consistent candidate for
    each point. Returns one (col, row) position per input point.
    """
    candidates_n = []
    scores_n = []
    rankscores_n = []
    for src_xy in src_xys_n:
        cand_xys, cand_vs = get_matches(src_img, src_xy, targ_img)
        candidates_n.append(cand_xys)
        scores_n.append(cand_vs)
        # node costs are currently disabled (all zeros)
        rankscores_n.append(0 * np.arange(len(cand_vs)))

    chosen_n = dp_match_ternary(candidates_n, rankscores_n, src_xys_n)
    return [candidates_n[i][m] for (i, m) in enumerate(chosen_n)]
+
+
def dp_match(xys_n, vs_n, src_xys_n):
    """Pick one candidate per keypoint via a pairwise shortest-path pass.

    Node costs are the per-candidate costs vs_n; the edge cost between
    candidates of consecutive keypoints is how much their displacement
    deviates from the displacement of the corresponding source keypoints.
    Returns the list of selected candidate indices, one per keypoint.
    """
    N = len(xys_n)
    assert len(vs_n) == N
    assert len(src_xys_n) == N

    nodecosts = vs_n
    edgecosts = []
    for n in range(N - 1):
        # expected displacement between keypoints n and n+1 in the source
        src_offset_d = (np.array(src_xys_n[n]) - np.array(src_xys_n[n + 1]))[None, :]
        # actual displacement for every candidate pair (a, b)
        cand_offsets_abd = xys_n[n][:, None, :] - xys_n[n + 1][None, :, :]
        deviations_abd = cand_offsets_abd - src_offset_d
        edgecosts.append(np.sqrt((deviations_abd ** 2).sum(axis=2)))

    paths_mn, costs_m = sp.shortest_paths(nodecosts, edgecosts)
    return paths_mn[np.argmin(costs_m)]
+
def dp_match_ternary(xys_n, vs_n, src_xys_n):
    """Pick one candidate per keypoint via shortest paths with triple costs.

    Like dp_match, but besides pairwise edge costs (log ratio of segment
    lengths vs. the source) it scores every consecutive candidate TRIPLE by
    how much the triangle they form differs in shape from the corresponding
    source triangle, compared via complex side ratios (which capture both
    angles and relative scale, invariant to translation).

    xys_n: per keypoint, an (n_candidates, 2) array of candidate positions.
    vs_n: per keypoint, per-candidate node costs.
    src_xys_n: keypoint positions in the source image.
    Returns the list of chosen candidate indices, one per keypoint.
    """
    N = len(xys_n)
    assert len(vs_n) == N
    assert len(src_xys_n) == N

    nodecosts = vs_n
    edgecosts = []
    tripcosts = []

    # edge cost for the first pair: |log(candidate length / source length)|
    diff_d = (np.array(src_xys_n[0]) - np.array(src_xys_n[1]))[None,:]
    diff_abd = xys_n[0][:,None,:] - xys_n[1][None,:,:]
    lengths_ab = mu.norms(diff_abd, 2)
    edge_coef = 10   # weight of the length-ratio edge cost
    edgecosts.append(np.abs(np.log(lengths_ab / np.linalg.norm(diff_d)))*edge_coef)

    #edgecosts.append(np.zeros((xys_n[0].shape[0],xys_n[1].shape[0])))
    for n in xrange(2,N):
        xys_ad = xys_n[n-2]
        xys_bd = xys_n[n-1]
        xys_cd = xys_n[n]
        A = len(xys_ad)
        B = len(xys_bd)
        C = len(xys_cd)
        tripcosts_abc = np.empty((A,B,C))
        # describe the source triangle by the complex ratios of its sides
        oldtri = np.array([src_xys_n[n-2], src_xys_n[n-1], src_xys_n[n]])
        oldsides = np.array([oldtri[1]-oldtri[2], oldtri[2]-oldtri[0], oldtri[0]-oldtri[1]])
        oldcomplex = oldsides[:,0] + 1j*oldsides[:,1]
        oldcomplexratios = np.array([oldcomplex[1]/oldcomplex[2],oldcomplex[2]/oldcomplex[0],oldcomplex[0]/oldcomplex[1]])
        # NOTE(review): oldsidelengths/oldangles are only used by the
        # commented-out cost variants below
        oldsidelengths = mu.norms(oldsides, 1)
        oldangles = np.arccos([oldsides[1].dot(oldsides[2])/np.linalg.norm(oldsides[1])/np.linalg.norm(oldsides[2]),
                               oldsides[2].dot(oldsides[0])/np.linalg.norm(oldsides[2])/np.linalg.norm(oldsides[0]),
                               oldsides[0].dot(oldsides[1])/np.linalg.norm(oldsides[0])/np.linalg.norm(oldsides[1])])
        # brute-force score of every candidate triple (O(A*B*C))
        for a in xrange(A):
            for b in xrange(B):
                for c in xrange(C):

                    newtri = np.array([xys_ad[a], xys_bd[b], xys_cd[c]])
                    newsides = np.array([newtri[1]-newtri[2],newtri[2]-newtri[0], newtri[0]-newtri[1]])
                    #newsidelengths = mu.norms(newsides,1)
                    newcomplex = newsides[:,0] + 1j*newsides[:,1]
                    newcomplexratios = np.array([newcomplex[1]/newcomplex[2],newcomplex[2]/newcomplex[0],newcomplex[0]/newcomplex[1]])

                    #newangles = np.arccos([newsides[1].dot(newsides[2])/np.linalg.norm(newsides[1])/np.linalg.norm(newsides[2]),
                    #newsides[2].dot(newsides[0])/np.linalg.norm(newsides[2])/np.linalg.norm(newsides[0]),
                    #newsides[0].dot(newsides[1])/np.linalg.norm(newsides[0])/np.linalg.norm(newsides[1])])
                    #tripcosts_abc[a,b,c] = np.linalg.norm(newsidelengths - oldsidelengths)
                    #tripcosts_abc[a,b,c] = np.linalg.norm(newangles - oldangles)*10
                    tripcosts_abc[a,b,c] = (np.abs(newcomplexratios - oldcomplexratios)**2).sum()
        tripcosts.append(tripcosts_abc)

        # edge cost for the (n-1, n) pair, same log-length-ratio measure
        diff_d = (np.array(src_xys_n[n-1]) - np.array(src_xys_n[n]))[None,:]
        diff_bcd = xys_bd[:,None,:] - xys_cd[None,:,:]
        lengths_bc = mu.norms(diff_bcd, 2)
        edgecosts.append(np.abs(np.log(lengths_bc / np.linalg.norm(diff_d)))*edge_coef)

    #diff_d = (np.array(src_xys_n[n]) - np.array(src_xys_n[n+1]))[None,:]
    #diff_abd = xys_ad[:,None,:] - xys_bd[None,:,:]
    #diffdiff_abd = diff_abd - diff_d
    #edgecosts_ab = np.sqrt((diffdiff_abd**2).sum(axis=2))
    #edgecosts.append(edgecosts_ab)

    bestpath,cost = sp.shortest_paths_ternary(nodecosts, edgecosts,tripcosts)
    print "cost",cost
    return bestpath
+
+
def get_matches(src_img, xy, targ_img, patch_size = 25, max_deficit = .3, min_match_distance = 10, max_num_matches = 20):
    """Find candidate matches for one keypoint by normalized cross-correlation.

    A (patch_size x patch_size) patch around xy = (col, row) of src_img is
    matched against targ_img with cv2.matchTemplate (TM_CCOEFF_NORMED).
    Local maxima of the response within max_deficit of the best score, at
    least min_match_distance pixels apart, are kept, strongest first.

    Returns (xys, vals): an (m, 2) array of (col, row) match positions
    (m <= max_num_matches) and their correlation scores.
    """
    assert patch_size % 2 == 1    # the patch must have a center pixel
    patch_radius = patch_size // 2
    assert src_img.ndim == 3
    col_keypoint, row_keypoint = xy

    # the whole patch must lie inside the source image
    assert row_keypoint - patch_radius >= 0
    assert row_keypoint + patch_radius < src_img.shape[0]
    assert col_keypoint - patch_radius >= 0
    assert col_keypoint + patch_radius < src_img.shape[1]

    patch = src_img[row_keypoint-patch_radius:row_keypoint+patch_radius+1,
                    col_keypoint-patch_radius:col_keypoint+patch_radius+1, :]

    heatmap = cv2.matchTemplate(targ_img, patch, cv2.TM_CCOEFF_NORMED)

    # absolute acceptance threshold: within max_deficit of the best response
    sorted_heatmap_values = np.sort(heatmap.flatten())
    abs_thresh = sorted_heatmap_values[-1] - max_deficit

    # a pixel is a local maximum when it equals the max of its neighborhood
    heatmap_maxes = ndi.maximum_filter(heatmap, size=(min_match_distance, min_match_distance))
    rows_local_maxima, cols_local_maxima = np.nonzero(heatmap_maxes == heatmap)

    rc_good_local_maxima = []
    vals_good_local_maxima = []

    for (row_lm, col_lm) in zip(rows_local_maxima, cols_local_maxima):
        val = heatmap[row_lm, col_lm]
        if val >= abs_thresh:
            rc_good_local_maxima.append((row_lm, col_lm))
            vals_good_local_maxima.append(val)

    # keep the max_num_matches strongest candidates, best first
    rc_good_local_maxima = np.array(rc_good_local_maxima)
    vals_good_local_maxima = np.array(vals_good_local_maxima)
    sort_inds = (-vals_good_local_maxima).argsort()[:max_num_matches]

    rc_good_local_maxima = rc_good_local_maxima[sort_inds]
    vals_good_local_maxima = vals_good_local_maxima[sort_inds]

    if DEBUG_PLOTTING:
        # show the response map with each kept candidate labeled by its score
        plot_img = cv2.cvtColor(heatmap, cv2.COLOR_GRAY2BGR)
        for (i,(row, col), v) in zip(it.count(), rc_good_local_maxima, vals_good_local_maxima):
            cv2.putText(plot_img, "%i:%.2f"%(i,v), (col, row), cv2.FONT_HERSHEY_PLAIN, 1.0, Colors.RED, thickness = 1)
            cv2.circle(plot_img, (col, row), 3, Colors.GREEN)
        global PLOT_COUNTER
        cv2.imshow("get_matches_heatmap%i"%PLOT_COUNTER, plot_img)
        PLOT_COUNTER += 1
        while cv2.waitKey(10) == -1: pass

    # heatmap coordinates are patch-corner based: add patch_radius to get the
    # patch centers, and flip (row, col) -> (col, row)
    return rc_good_local_maxima[:,::-1] + patch_radius, vals_good_local_maxima
+
diff --git a/iros/iros_registration.py b/iros/iros_registration.py
new file mode 100644
index 0000000..e69de29
diff --git a/iros/iros_tps.py b/iros/iros_tps.py
new file mode 100644
index 0000000..b07d08d
--- /dev/null
+++ b/iros/iros_tps.py
@@ -0,0 +1,118 @@
import os
import os.path as osp
from glob import glob

import cv2
import numpy as np

import lfd.tps as tps
from jds_utils.colorize import colorize
from lfd import fastmath, registration
import iros.image_registration as ir
+
+
PLOT = False  # set True to visualize keypoint correspondences with mayavi


def axisanglepart(m):
    """Return (theta, axis) for the rotation closest to the 3x3 matrix m.

    m is projected onto the nearest rotation via SVD (u.dot(vh)); theta is
    the rotation angle and axis a (1, 3) row vector. Raises for reflections
    (det < 0) or NaN entries. Note: axis is numerically undefined (division
    by ~0) when theta is near 0 or pi.
    """
    if np.linalg.det(m) < 0 or np.isnan(m).any(): raise Exception
    u,s,vh = np.linalg.svd(m)
    rotpart = u.dot(vh)
    # BUG FIX: clamp the arccos argument into [-1, 1]; floating-point error
    # can push (trace-1)/2 just outside the range, making arccos return NaN.
    theta = np.arccos(np.clip((np.trace(rotpart) - 1)/2, -1.0, 1.0))
    axis = (1/(2*np.sin(theta))) * np.array([[rotpart[2,1] - rotpart[1,2], rotpart[0,2]-rotpart[2,0], rotpart[1,0]-rotpart[0,1]]])
    return theta, axis
+
+
+
+def fit(demo_keypts, current_keypts):
+ rot_coefs = .01*np.array((0.01,0.01,0.0025))
+ scale_coef= .01*.01
+ fastmath.set_coeffs(rot_coefs, scale_coef)
+ rfunc = fastmath.rot_reg
+ lin_ag, trans_g, w_ng = tps.tps_fit_regrot(demo_keypts, current_keypts, bend_coef = 1, rfunc=rfunc)
+ #spline.fit(demo_keypts, current_keypts,bend_coef=10,rot_coef=.1)
+ print "-----------------------------"
+ print "dataset %i"%i
+ centertrans_g = tps.tps_eval([demo_keypts.mean(axis=0)], lin_ag, trans_g, w_ng, demo_keypts)[0]-demo_keypts.mean(axis=0)
+ print "translation of center:\n", centertrans_g
+ print "linear:\n",lin_ag
+ print "axis-angle", axisanglepart(lin_ag.T)
+ print "singular vals", np.linalg.svd(lin_ag.T)[1]
+ print "nonlinear:\n",w_ng
+ print "residuals:\n",tps.tps_eval(demo_keypts, lin_ag, trans_g, w_ng, demo_keypts)-current_keypts
+ max_residual = (tps.tps_eval(demo_keypts, lin_ag, trans_g, w_ng, demo_keypts)-current_keypts).max()
+ print "max residual:%.3f"%max_residual
+ if max_residual > .015:
+ print colorize("warning: large residual (%.3f)"%max_residual,'red',highlight=True)
+ if np.linalg.norm(centertrans_g) > .4:
+ print colorize("warning: large translation (%.3f)"%np.linalg.norm(centertrans_g),'red',highlight=True)
+
+ if PLOT:
+ import mayavi.mlab as mlab
+ mlab.clf()
+ x,y,z = demo_keypts.T
+ mlab.points3d(x,y,z,color=(0,1,0))
+ x,y,z = current_keypts.T
+ mlab.points3d(x,y,z,color=(1,0,0))
+ for (demopt, curpt) in zip(demo_keypts, current_keypts):
+ mlab.plot3d([demopt[0],curpt[0]], [demopt[1],curpt[1]], [demopt[2], curpt[2]],color=(1,1,0),tube_radius=.001)
+ mlab.show()
+ f = registration.ThinPlateSpline()
+ f.lin_ag = lin_ag
+ f.trans_g = trans_g
+ f.w_ng = w_ng
+ f.x_na = demo_keypts
+
+ return f
+
def xyz2rc(xyz, xyz_img):
    """Map a 3D keypoint back to its (row, col) pixel in an organized cloud.

    Picks the pixel of xyz_img (H x W x 3) minimizing the squared distance
    to xyz; NaN (invalid) pixels are excluded. Prints a warning when the
    best match exceeds 1e-4 in squared distance.
    """
    d2 = ((xyz_img - xyz) ** 2).sum(axis=2)
    d2 = np.where(np.isnan(d2), 1e100, d2)  # never select invalid pixels
    best_flat = d2.argmin()
    mindist = d2.min()
    if mindist > 1e-4:
        print("warning: nonzero minimum distance: %s" % mindist)
    return np.unravel_index(best_flat, d2.shape)
+
def test_fitting():
    """Run the TPS fit on every saved test segment (smoke test)."""
    testdata_dir = osp.join(os.getenv("IROS_DATA_DIR"), "testdata")
    np.set_printoptions(precision=3)
    keys = ["current_xyz", "current_rgb", "exec_keypts", "demo_xyz", "demo_rgb", "demo_keypts"]
    for i, fname in enumerate(glob(osp.join(testdata_dir, "segment*.npz"))):
        archive = np.load(fname)
        (current_xyz, current_rgb, current_keypts,
         demo_xyz, demo_rgb, demo_keypts) = [archive[key] for key in keys]
        fit(demo_keypts, current_keypts)
+
def test_keypoint_matching():
    """Compare image-registration keypoint predictions against human labels.

    For every saved test segment: projects the labeled 3D keypoints into
    pixel coordinates, predicts their locations in the current image with
    ir.register_images, prints (ground truth, prediction, error) triples,
    and shows both annotated images until a key is pressed.
    """
    iros_data_dir = osp.join(os.getenv("IROS_DATA_DIR"))
    testdata_dir = osp.join(os.getenv("IROS_DATA_DIR"), "testdata")
    fnames = glob(osp.join(testdata_dir,"segment*.npz"))
    np.set_printoptions(precision=3)
    for (i,fname) in enumerate(fnames):

        print "**** dataset %i ****"%i
        f = np.load(fname)
        (current_xyz, current_rgb, current_keypts,demo_xyz, demo_rgb, demo_keypts) = \
            [f[key] for key in ["current_xyz", "current_rgb", "exec_keypts", "demo_xyz", "demo_rgb", "demo_keypts"]]

        # reload the demo rgb from the original recording for this segment
        # (filenames look like "segmentNN...", so chars 7:9 are the number)
        seg_num = int(osp.basename(fname)[7:9])
        demo_rgb = np.load(glob(osp.join(iros_data_dir, "InterruptedSuture0", 'point_clouds', 'pt%i/seg%i_*_rgb_*.npy'%(0, seg_num)))[0])
        # project 3D keypoints to (col, row) = (x, y) pixel coordinates
        current_xy = np.array([xyz2rc(xyz, current_xyz) for xyz in current_keypts])[:,::-1]
        demo_xy = np.array([xyz2rc(xyz, demo_xyz) for xyz in demo_keypts])[:,::-1]
        pred_current_xy = ir.register_images(demo_rgb, current_rgb, demo_xy)
        # columns printed: ground truth, prediction, error
        # NOTE(review): this inner i shadows the dataset index above
        for i in xrange(len(demo_xy)):
            print "(%i, %i), (%i, %i), (%i, %i)"%tuple(current_xy[i].tolist() + pred_current_xy[i].tolist() + (current_xy[i] - pred_current_xy[i]).tolist())
        cur_plot_img = current_rgb.copy()
        demo_plot_img = demo_rgb.copy()
        # green = prediction, blue = ground truth (current), red = demo labels
        plot_points_numbered(cur_plot_img, pred_current_xy, ir.Colors.GREEN)
        plot_points_numbered(cur_plot_img, current_xy, ir.Colors.BLUE)
        plot_points_numbered(demo_plot_img, demo_xy, ir.Colors.RED)
        # display until any key is pressed
        while cv2.waitKey(10)==-1:
            cv2.imshow("current_rgb", cur_plot_img)
            cv2.imshow("demo_rgb", demo_plot_img)
+
def plot_points_numbered(plot_img, xys, color=(255,255,255)):
    """Draw the index of each (col, row) point onto plot_img (in place)."""
    for idx, (col, row) in enumerate(xys):
        label = str(idx)
        cv2.putText(plot_img, label, (col, row), cv2.FONT_HERSHEY_PLAIN, 1.0, color, thickness=1)
+
if __name__ == "__main__":
    # cv2 import for the interactive visual check below
    import cv2
    test_keypoint_matching()
\ No newline at end of file
diff --git a/iros/launch.py b/iros/launch.py
new file mode 100644
index 0000000..2e77dfc
--- /dev/null
+++ b/iros/launch.py
@@ -0,0 +1,6 @@
+import rospy
+from ArmPlannerPR2 import PlannerPR2
+
+if __name__ == "__main__":
+ rospy.init_node("Test_Planning")
+ p = PlannerPR2 ()
diff --git a/iros/make_suture_library.py b/iros/make_suture_library.py
new file mode 100644
index 0000000..8efc9e0
--- /dev/null
+++ b/iros/make_suture_library.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument("task")
+parser.add_argument("part_name")
+args = parser.parse_args()
+
+
+import lfd
+from lfd import bag_proc as bp
+import rosbag
+import os.path as osp
+from jds_utils.yes_or_no import yes_or_no
+import os, sys
+import yaml
+import h5py
+
+IROS_DATA_DIR = os.getenv("IROS_DATA_DIR")
+task_file = osp.join(IROS_DATA_DIR, "suture_demos2.yaml")
+with open(osp.join(IROS_DATA_DIR,task_file),"r") as fh:
+ task_info = yaml.load(fh)
+
+
+db_file = osp.join(IROS_DATA_DIR, task_info[args.task][args.part_name]["db_file"])
+print 'Writing to', db_file
+if osp.exists(db_file):
+ if yes_or_no(db_file + ' already exists. Overwrite?'):
+ os.remove(db_file)
+ else:
+ print 'Aborting.'
+ sys.exit(1)
+
+task_lib = h5py.File(db_file, mode="w")
+link_names = ["r_gripper_tool_frame", "l_gripper_tool_frame"]
+
+for (i_demo,bag_file) in enumerate(task_info[args.task][args.part_name]["demo_bag"]):
+ bag = rosbag.Bag(bag_file)
+ demo_segs = bp.create_segments(bag, link_names)
+
+ for (i_seg,demo_seg) in enumerate(demo_segs):
+ #if demo_seg["done"]:
+ # path = "%.2i.done"%i_demo
+ #else:
+ path = "%.2i.%.2i"%(i_demo, i_seg)
+ demo_seg["demo_index"] = i_demo
+ demo_seg["seg_index"] = i_seg
+ bp.dict_to_hdf(task_lib, demo_seg, path)
+
+
diff --git a/iros/multi_item_verb_demos2.yaml b/iros/multi_item_verb_demos2.yaml
new file mode 100644
index 0000000..8a0b89e
--- /dev/null
+++ b/iros/multi_item_verb_demos2.yaml
@@ -0,0 +1,8 @@
+
+# AUTOMATICALLY ADDED BY multi_item_teach_verb.py ON 2013-02-19
+pierce-needle-hole1:
+ stages: ['pierce-needle-hole1-0', 'pierce-needle-hole1-1']
+ verb: pierce
+ items: ['needle', 'hole']
+ arms_used: ['l', 'l']
+ special_pts : ['None', 'None']
\ No newline at end of file
diff --git a/iros/needleLoc.npy b/iros/needleLoc.npy
new file mode 100644
index 0000000..a1ccafe
Binary files /dev/null and b/iros/needleLoc.npy differ
diff --git a/iros/needleLocTest.npy b/iros/needleLocTest.npy
new file mode 100644
index 0000000..40613d5
Binary files /dev/null and b/iros/needleLocTest.npy differ
diff --git a/notes.txt b/iros/notes.txt
similarity index 100%
rename from notes.txt
rename to iros/notes.txt
diff --git a/iros/process_suture_bag.py b/iros/process_suture_bag.py
new file mode 100644
index 0000000..b9e5c31
--- /dev/null
+++ b/iros/process_suture_bag.py
@@ -0,0 +1,346 @@
+#!/usr/bin/env python
+
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument("task")
+parser.add_argument("part_index",type=int)
+args = parser.parse_args()
+
+import lfd
+import iros
+from lfd import bag_proc as bp
+import rosbag
+import rospy
+import os.path as osp
+from jds_utils.yes_or_no import yes_or_no
+import brett2.ros_utils as ru
+from jds_utils.colorize import colorize
+import os, sys
+import yaml
+import h5py
+import numpy as np
+import cv2
+import simple_clicker as sc
+
+# find data files, files to save to
+IROS_DATA_DIR = os.getenv("IROS_DATA_DIR")
+IROS_DIR = osp.dirname(iros.__file__)
+
+task_file = osp.join(IROS_DIR, "suture_demos2.yaml")
+
+with open(osp.join(IROS_DIR, task_file),"r") as fh:
+ task_info = yaml.load(fh)
+
+bag = task_info[args.task][args.part_index]["demo_bag"]
+bag = rosbag.Bag(bag)
+
+jtf = osp.join(IROS_DATA_DIR, args.task, 'joint_trajectories', 'pt' + str(args.part_index))
+kpf = osp.join(IROS_DATA_DIR, args.task, 'keypoints', 'pt' + str(args.part_index))
+pcf = osp.join(IROS_DATA_DIR, args.task, 'point_clouds', 'pt' + str(args.part_index))
+
+#kpf = osp.join(IROS_DATA_DIR, 'InterruptedSuture0', 'keypoints', 'pt0')
+
+### extract kinematics info from bag
+def extract_kinematics(np_file, info):
+
+ for i in range(len(info)):
+ savefile = np_file + '/seg%s'%i
+ print 'Writing to', np_file
+
+ if osp.exists(savefile + '_larm.npy'):
+ if yes_or_no('/seg%s already exists. Overwrite?'%i):
+ os.remove(savefile + '_larm.npy')
+ os.remove(savefile + '_rarm.npy')
+ os.remove(savefile + '_lgrip.npy')
+ os.remove(savefile + '_rgrip.npy')
+ else:
+ print 'Aborting...'
+ sys.exit(1)
+
+ larm = info[i]["leftarm"]
+ rarm = info[i]["rightarm"]
+ lgrip = info[i]["l_gripper_joint"]
+ rgrip = info[i]["r_gripper_joint"]
+
+ np.save(savefile + '_larm.npy' , np.array(larm))
+ np.save(savefile + '_rarm.npy' , np.array(rarm))
+ np.save(savefile + '_lgrip.npy' , np.array(lgrip))
+ np.save(savefile + '_rgrip.npy' , np.array(rgrip))
+
+ print 'Saved all trajectories for %i segments'%i
+
+
+# creates the text for the yaml file for the demo
+def get_new_demo_entry_text(segment, hole1, hole2, tcut, mcut, bcut, needle):
+
+ new_entry_text = """
+ segment%(seg_num)s:
+ hole1 center: %(hole1)s
+ hole2 center: %(hole2)s
+ top of cut: %(tcut)s
+ middle of cut: %(mcut)s
+ bottom of cut: %(bcut)s
+ needle tip: %(needle)s
+""" % dict(
+ seg_num = segment,
+ hole1 = hole1,
+ hole2 = hole2,
+ tcut = tcut,
+ mcut = mcut,
+ bcut = bcut,
+ needle = needle,
+ )
+
+ return new_entry_text
+
+
+# appends the entry text to the yaml file
+def add_new_entry_to_yaml_file(data_dir, new_entry_text):
+ demofile = open(data_dir + "/suture_demos.yaml", "a")
+ demofile.write(new_entry_text)
+ demofile.flush()
+ os.fsync(demofile.fileno())
+ demofile.close()
+
+print 'getting bag info...'
+
+button_presses = bp.get_button_presses(bag)
+all_times = bp.read_button_presses(button_presses)
+start_times = [t for t, action in all_times if action == 'start']
+stop_times = [t for t, action in all_times if action == 'stop']
+look_times = [t for t, action in all_times if action == 'look']
+
+assert len(start_times) == len(stop_times)
+SEGNUM = len(look_times)
+
+
+if yes_or_no('Do you want to extract kinematics info from bag file?'):
+ #raw_input("press enter to extract kinematics info from bag file")
+
+ link_names = ["r_gripper_tool_frame", "l_gripper_tool_frame"]
+ kinematics_info = bp.extract_kinematics_from_bag(bag, link_names)
+ times = kinematics_info["time"]
+
+ start_inds = np.searchsorted(times, start_times)
+ stop_inds = np.searchsorted(times, stop_times)
+
+ forced_segs_info = []
+ for start_ind, stop_ind in zip(start_inds, stop_inds):
+ seg = bp.extract_segment(kinematics_info, start_ind, stop_ind)
+ forced_segs_info.append(seg)
+
+ extract_kinematics(jtf, forced_segs_info)
+
+ raw_input("Extracted segments. Press enter to continue")
+
+# determine which keypoints matter for each segment based on user input
+print 'Extracting look_time point clouds...'
+look_clouds = bp.get_transformed_clouds(bag, look_times)
+
+window_name = "Find Keypoints"
+keypt_list = ['lh', 'rh', 'tc', 'mc', 'bc', 'ne', 'nt', 'ntt', 'stand', 'rzr', 'none']
+keypoints_locations = []
+keypoints_names = []
+
+for s in range(SEGNUM):
+ print 'look time for segment %s'%s
+ keypt_names = []
+ keypt_locs = []
+ k = 0
+
+ while (True):
+ sc.show_pointclouds(look_clouds[s], window_name)
+ kp = raw_input("Which key points are important for this segment? choices are: " + str(keypt_list) + ". (please only enter one key point at a time): ")
+
+ if kp not in keypt_list:
+ print 'Incorrect input. Try again!'
+ continue
+
+ elif kp in ['lh', 'rh', 'bc', 'mc', 'tc', 'stand']:
+ print colorize("Looking for %s..."%kp, 'green', bold=True)
+
+ xyz_tf = look_clouds[s][0].copy()
+ rgb_plot = look_clouds[s][1].copy()
+ xyz_tf[np.isnan(xyz_tf)] = -2
+
+ kp_loc = sc.find_kp(kp, xyz_tf, rgb_plot, window_name)
+ keypt_locs.append(kp_loc)
+ if kp == 'lh': keypt_names.append('left_hole')
+ elif kp == 'rh': keypt_names.append('right_hole')
+ elif kp == 'stand': keypt_names.append('stand')
+ elif kp == 'bc': keypt_names.append('bot_cut')
+ elif kp == 'mc': keypt_names.append('mid_cut')
+ elif kp == 'tc': keypt_names.append('top_cut')
+
+ np.save(pcf + '/seg%s_%s_xyz_tf.npy'%(s,keypt_names[k]), xyz_tf)
+ np.save(pcf + '/seg%s_%s_rgb_pl.npy'%(s,keypt_names[k]), rgb_plot)
+
+ elif kp == 'rzr':
+ xyz_tfs = []
+ rgb_plots = []
+ num_clouds = 10
+
+ while True:
+ razor_look_times = []
+
+ print colorize("Looking for Razor...", 'green', bold=True)
+ print 'getting %s point clouds...'%num_clouds
+
+ for t in range(num_clouds):
+ razor_look_times.append(look_times[s] + t)
+
+ razor_clouds = bp.get_transformed_clouds(bag, razor_look_times)
+
+ for i in range(num_clouds):
+ xyz_tfs.append(razor_clouds[i][0].copy())
+ rgb_plots.append(razor_clouds[i][1].copy())
+
+ rzr_loc, valid_pts = sc.find_red_block(xyz_tfs, rgb_plots, window_name)
+ if (valid_pts > 0) or (num_clouds >= 20):
+ del razor_look_times
+ break
+ else:
+ print colorize("Couldn't find the razor! Trying again with more point clouds", "red", True, True)
+ del razor_look_times
+ num_clouds += 5
+
+ keypt_locs.append(rzr_loc)
+ keypt_names.append('razor')
+
+ np.save(pcf + '/seg%s_%s_xyz_tfs.npy'%(s,keypt_names[k]), xyz_tfs)
+ np.save(pcf + '/seg%s_%s_rgb_pls.npy'%(s,keypt_names[k]), rgb_plots)
+
+ del xyz_tfs
+ del rgb_plots
+
+ elif kp == 'ne':
+ xyz_tfs = []
+ rgb_plots = []
+ num_clouds = 10
+
+ while True:
+ needle_look_times = []
+
+ print colorize("Looking for Needle End...", 'green', bold=True)
+ print 'getting %s point clouds...'%num_clouds
+
+ for t in range(num_clouds):
+ needle_look_times.append(look_times[s] + t)
+
+ needle_clouds = bp.get_transformed_clouds(bag, needle_look_times)
+
+ for i in range(num_clouds):
+ xyz_tfs.append(needle_clouds[i][0].copy())
+ rgb_plots.append(needle_clouds[i][1].copy())
+
+ needle_loc, valid_pts = sc.find_red_block(xyz_tfs, rgb_plots, window_name)
+ if (valid_pts > 0) or (num_clouds >= 20):
+ del needle_look_times
+ break
+ else:
+ print colorize("Couldn't find the needle end! Trying again with more point clouds", "red", True, True)
+ del needle_look_times
+ num_clouds += 5
+
+ keypt_locs.append(needle_loc)
+ keypt_names.append('needle_end')
+
+ np.save(pcf + '/seg%s_%s_xyz_tfs.npy'%(s,keypt_names[k]), xyz_tfs)
+ np.save(pcf + '/seg%s_%s_rgb_pls.npy'%(s,keypt_names[k]), rgb_plots)
+
+ del xyz_tfs
+ del rgb_plots
+
+ elif kp == 'nt':
+ xyz_tfs = []
+ rgb_plots = []
+ num_clouds = 10
+
+ while(True):
+ needle_look_times = []
+
+ print colorize("Looking for Needle Tip...", 'green', bold=True)
+ print 'getting %s needle point clouds...'%num_clouds
+
+ for t in range(num_clouds):
+ needle_look_times.append(look_times[s] + t)
+
+ needle_clouds = bp.get_transformed_clouds(bag, needle_look_times)
+
+ for i in range(num_clouds):
+ xyz_tfs.append(needle_clouds[i][0].copy())
+ rgb_plots.append(needle_clouds[i][1].copy())
+
+ needle_loc = sc.find_needle_tip(xyz_tfs, rgb_plots, window_name, 'process')
+ if yes_or_no("Is this a good point?"):
+ break
+ else:
+ del needle_look_times
+ num_clouds += 5
+ print ("Trying again with more point clouds.")
+
+ keypt_locs.append(needle_loc)
+ keypt_names.append('needle_tip')
+ np.save(pcf + '/seg%s_%s_xyz_tfs.npy'%(s,keypt_names[k]), xyz_tfs)
+ np.save(pcf + '/seg%s_%s_rgb_pls.npy'%(s,keypt_names[k]), rgb_plots)
+
+ del xyz_tfs
+ del rgb_plots
+
+ elif kp == 'ntt':
+ xyz_tfs = []
+ rgb_plots = []
+ needle_look_times = []
+ num_clouds = 10
+
+ print colorize("Looking for Needle Tip...", 'green', bold=True)
+ print 'getting %s needle point clouds...'%num_clouds
+
+ for t in range(num_clouds):
+ needle_look_times.append(look_times[s] + t)
+
+ needle_clouds = bp.get_transformed_clouds(bag, needle_look_times)
+
+ for i in range(num_clouds):
+ xyz_tfs.append(needle_clouds[i][0].copy())
+ rgb_plots.append(needle_clouds[i][1].copy())
+
+ needle_loc = sc.find_needle_tip(xyz_tfs, rgb_plots, window_name, 'process')
+
+ keypt_locs.append((0,0,0))
+ keypt_names.append('tip_transform')
+ np.save(kpf + '/seg%s_needle_world_loc.npy'%s, needle_loc)
+ np.save(pcf + '/seg%s_%s_xyz_tfs.npy'%(s,keypt_names[k]), xyz_tfs)
+ np.save(pcf + '/seg%s_%s_rgb_pls.npy'%(s,keypt_names[k]), rgb_plots)
+
+ del needle_look_times
+ del xyz_tfs
+ del rgb_plots
+
+ elif kp == 'none':
+ keypt_locs.append((0,0,0))
+ keypt_names.append('none')
+
+ if yes_or_no('Key point %s saved for this segment. Is there another key point for this segment?'%kp):
+ k += 1
+ continue
+ else:
+ keypoints_locations.append(keypt_locs)
+ keypoints_names.append(keypt_names)
+ print 'keypoints_names', keypoints_names
+ del keypt_locs
+ del keypt_names
+ break
+
+
+raw_input("Finished processing point clouds. Press enter to save key points to numpy file")
+np.save(kpf + '/keypoints.npy', keypoints_locations)
+np.save(kpf + '/keypoints_names.npy', keypoints_names)
+
+
+#raw_input("press enter to add new entry to yaml file")
+
+#entry = get_new_demo_entry_text(i, hole1_tracker[i], hole2_tracker[i], tcut_tracker[i], mcut_tracker[i], bcut_tracker[i], needle_tracker[i])
+#add_new_entry_to_yaml_file(IROS_DATA_DIR, str(SEGNUM))
+
+raw_input("press enter to finish")
diff --git a/iros/simple_clicker.py b/iros/simple_clicker.py
new file mode 100644
index 0000000..7119728
--- /dev/null
+++ b/iros/simple_clicker.py
@@ -0,0 +1,376 @@
+
+import cv2
+import numpy as np
+import sys
+import rospy
+
+from jds_utils.colorize import colorize
+import brett2.ros_utils as ru
+import sensor_msgs.msg as sm
+
+
+WIN_NAME = "Find Keypoints"
+
+def get_kp_clouds(listener, cloud_topic, num_clouds):
+
+ class MessageGetter:
+
+ def __init__(self, n_msgs):
+ self.n_msgs = n_msgs
+ self.msgs = []
+ self.done = False
+ def callback(self,msg):
+ if self.done:
+ return
+ elif len(self.msgs) < self.n_msgs:
+ self.msgs.append(msg)
+ else:
+ self.done = True
+
+ def get_msgs(n_msgs, topic, msgtype):
+ print "waiting for %i messages on topic %s"%(n_msgs, topic)
+ mg = MessageGetter(n_msgs)
+ sub = rospy.Subscriber(topic, msgtype, mg.callback)
+ while not mg.done:
+ rospy.sleep(.05)
+ sub.unregister()
+ return mg.msgs
+
+ msgs = get_msgs(num_clouds, cloud_topic, sm.PointCloud2)
+
+ if num_clouds == 1:
+ msg = msgs[0]
+ xyz, rgb = ru.pc2xyzrgb(msg)
+
+ if (xyz.shape[0] == 1 or xyz.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+
+ xyz_tf = ru.transform_points(xyz, listener, "base_footprint", "/camera_rgb_optical_frame")
+ rgb_plot = rgb.copy()
+
+ return xyz_tf, rgb_plot
+
+ else:
+ xyz_tfs = []
+ rgb_plots = []
+ for i in range(num_clouds):
+ msg = msgs[i]
+ xyz, rgb = ru.pc2xyzrgb(msg)
+
+ if (xyz.shape[0] == 1 or xyz.shape[1] == 1): raise Exception("needs to be an organized point cloud")
+
+ xyz_tf = ru.transform_points(xyz, listener, "base_footprint", "/camera_rgb_optical_frame")
+
+ xyz_tfs.append(xyz_tf)
+ rgb_plots.append(rgb)
+
+ return xyz_tfs, rgb_plots
+
+
+def get_last_kp_loc(exec_keypts, desired_keypt, current_seg):
+
+ search_seg = current_seg - 1
+
+ while(True):
+ if search_seg < 0:
+ print "Reached beginning of execution and couldn't find desired keypoint! Aborting..."
+ sys.exit(1)
+ else:
+ kp_found = False
+ search_seg_names = exec_keypts[search_seg]["names"]; search_seg_locs = exec_keypts[search_seg]["locations"]
+
+ for k in range(len(search_seg_names)):
+ if search_seg_names[k] == desired_keypt:
+ kp_loc = search_seg_locs[k]
+ kp_found = True
+
+ if kp_found:
+ return kp_loc, search_seg
+ else:
+ search_seg -= 1
+
+
+def show_pointclouds(clouds, WIN_NAME):
+ nc = len(clouds)
+
+ if nc == 2:
+ rgb_plot = clouds[1].copy()
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(100)
+ else:
+ rgb_plot_multi = []
+ for i in range(nc):
+ rgb_plot_multi.append(clouds[i][1].copy())
+ cv2.imshow(WIN_NAME, rgb_plot_multi[i])
+ cv2.waitKey(100)
+
+#########################################
+### find all keypoints
+#########################################
+def get_kp_locations(kp_names, exec_keypts, current_seg, cloud_topic):
+ listener = ru.get_tf_listener()
+ kp_locations = []
+ #kp_xypixels = []
+
+ seg_kps = []
+ if kp_names[0] == "tip_transform":
+ while True:
+ xyz_tfs, rgb_plots = get_kp_clouds(listener, cloud_topic, 30)
+ needle_tip_loc = find_needle_tip(xyz_tfs, rgb_plots, WIN_NAME)
+ if needle_tip_loc[2] > 1:
+ return needle_tip_loc
+ else:
+ print colorize("Didn't find a high enough point! Trying again","red", True, True)
+
+ elif kp_names[0] in ["needle_end", "razor"]:
+ while True:
+ xyz_tfs, rgb_plots = get_kp_clouds(listener, cloud_topic, 30)
+ kp_loc, pts = find_red_block(xyz_tfs, rgb_plots, WIN_NAME)
+ if pts > 0:
+ seg_kps.append(kp_loc)
+ #kp_xypixels.append((0,0))
+ break
+ else:
+ print colorize("Couldn't find the keypoint! Trying again","red", True, True)
+
+ elif kp_names[0] == "needle_tip":
+ while True:
+ xyz_tfs, rgb_plots = get_kp_clouds(listener, cloud_topic, 30)
+ kp_loc = find_needle_tip(xyz_tfs, rgb_plots, WIN_NAME)
+ if kp_loc[2] > 0.8:
+ seg_kps.append(kp_loc)
+ #kp_xypixels.append((0,0))
+ break
+ else:
+ print colorize("Didn't find a high enough point! Trying again","red", True, True)
+
+ else:
+ xyz_tf, rgb_plot = get_kp_clouds(listener, cloud_topic, 1)
+ np.save("/tmp/xyz_tf.npy",xyz_tf)
+ np.save("/tmp/rgb.npy",rgb_plot)
+ for k in range(len(kp_names)):
+ kp_loc = find_kp(kp_names[k], xyz_tf, rgb_plot, exec_keypts, current_seg, WIN_NAME)
+ #kp_xypixels.append(kp_xypix)
+ seg_kps.append(kp_loc)
+
+ return seg_kps
+
+
+
+#########################################
+### find a single keypoint
+#########################################
+def find_kp(kp, xyz_tf, rgb_plot, past_keypts, current_seg, WIN_NAME):
+ ### clicking set-up
+ class GetClick:
+ x = None
+ y = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.x = x
+ self.y = y
+ self.done = True
+
+ print colorize("Click on the center of the %s."%kp, 'red', bold=True)
+ print colorize("If this keypoint is occluded, select the image window and press any key", 'red', bold=True)
+
+ gc = GetClick()
+ cv2.setMouseCallback(WIN_NAME, gc.callback)
+ while not gc.done:
+ cv2.imshow(WIN_NAME, rgb_plot)
+ k = cv2.waitKey(100)
+ if k == -1:
+ continue
+ else:
+ last_loc, found_seg = get_last_kp_loc(past_keypts, kp, current_seg)
+ print kp, "found in segment %s at location %s"%(found_seg, last_loc)
+ return last_loc
+
+ row_kp = gc.x
+ col_kp = gc.y
+
+ cv2.circle(rgb_plot, (row_kp, col_kp), 5, (0, 0, 255), -1)
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(100)
+
+ xyz_tf[np.isnan(xyz_tf)] = -2
+ x, y, z = xyz_tf[col_kp, row_kp]
+
+ print kp, "3d location", x, y, z
+
+ #return (x, y, z), (col_kp, row_kp)
+ return (x, y, z)
+
+
+#########################################
+### find needle
+#########################################
+def find_needle_tip(xyz_tfs, rgb_plots, WIN_NAME, mode='exec'):
+
+ ### clicking set-up
+ class GetClick:
+ xy = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.xy = (x,y)
+ self.done = True
+
+ needle_rect_corners = []
+ nc = len(xyz_tfs)
+ rgb_plot = rgb_plots[0].copy()
+
+ print colorize("click at the corners of a rectangle which encompasses the needle tip", 'red', bold=True)
+
+ for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback(WIN_NAME, gc.callback)
+ while not gc.done:
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(10)
+ needle_rect_corners.append(gc.xy)
+
+ xy_tl = np.array(needle_rect_corners).min(axis=0)
+ xy_br = np.array(needle_rect_corners).max(axis=0)
+
+ cv2.rectangle(rgb_plot, tuple(xy_tl), tuple(xy_br), (0, 255, 0))
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(10)
+
+ colmin, rowmin = xy_tl
+ colmax, rowmax = xy_br
+
+ row_needle = []
+ col_needle = []
+
+ xneedle = np.zeros((nc))
+ yneedle = np.zeros((nc))
+ zneedle = np.zeros((nc))
+
+ # extract depths from drawn rectangle
+ for i in range(nc):
+
+ z_rectangle = xyz_tfs[i][rowmin:rowmax, colmin:colmax, 2] # z values in extracted rectangle
+ z_rectangle[np.isnan(z_rectangle)] = -2
+ row_needle_temp, col_needle_temp = np.unravel_index(z_rectangle.argmax(), z_rectangle.shape)
+
+ col_needle_temp += colmin # since these indices are from small rectangle
+ row_needle_temp += rowmin
+
+ col_needle.append(col_needle_temp)
+ row_needle.append(row_needle_temp)
+
+ xneedle[i], yneedle[i], zneedle[i] = xyz_tfs[i][row_needle_temp, col_needle_temp]
+
+ del z_rectangle
+
+ ind = np.argmax(zneedle)
+
+ max_needle = []
+ max_needle.append(xneedle[ind])
+ max_needle.append(yneedle[ind])
+ max_needle.append(zneedle[ind])
+
+ #cv2.circle(rgb_plots[ind], (col_needle[ind], row_needle[ind]), 3, (255, 0, 0), 2)
+ #cv2.imshow(WIN_NAME, rgb_plots[ind])
+ #cv2.waitKey(100)
+
+
+ cv2.circle(rgb_plot, (col_needle[ind], row_needle[ind]), 3, (255, 0, 0), 2)
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(100)
+
+ print 'needle tip 3d location', max_needle
+
+ # plot the point cloud with a circle around "highest" point
+ #from mayavi import mlab
+ #x,y,z = clouds[ind][0][~np.isnan(clouds[ind][0][:,:,0])].T
+ #mlab.points3d(x,y,z,color=(1,0,0), mode="2dvertex")
+ #mlab.points3d([max_needle[0]], [max_needle[1]], [max_needle[2]], color=(0,1,0), mode="sphere", scale_factor=.04, opacity=.2)
+ #mlab.show()
+
+ #raw_input("press enter when done looking")
+ #print "at end of needle tip func"
+
+ return max_needle
+
+#########################################
+### find needle
+#########################################
+def find_red_block(xyz_tfs, rgbs, WIN_NAME):
+
+ ### clicking set-up
+ class GetClick:
+ xy = None
+ done = False
+ def callback(self, event, x, y, flags, param):
+ if self.done:
+ return
+ elif event == cv2.EVENT_LBUTTONDOWN:
+ self.xy = (x,y)
+ self.done = True
+
+ needle_rect_corners = []
+ nc = len(xyz_tfs)
+ rgb_plot = rgbs[0].copy()
+ print colorize("click at the corners of a rectangle which encompasses the end of the needle", 'red', bold=True)
+
+ for i in xrange(2):
+ gc = GetClick()
+ cv2.setMouseCallback(WIN_NAME, gc.callback)
+ while not gc.done:
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(10)
+ needle_rect_corners.append(gc.xy)
+
+ xy_tl = np.array(needle_rect_corners).min(axis=0)
+ xy_br = np.array(needle_rect_corners).max(axis=0)
+
+ cv2.rectangle(rgb_plot, tuple(xy_tl), tuple(xy_br), (0, 255, 0))
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(100)
+
+ colmin, rowmin = xy_tl
+ colmax, rowmax = xy_br
+
+ high_red_xyz = []
+ high_red_rc = []
+
+ valid_pts = 0
+ # extract depths from drawn rectangle
+
+ FOAM_HEIGHT = .85
+
+ for i in range(nc):
+ z_rectangle = xyz_tfs[i][rowmin:rowmax, colmin:colmax, 2] # z values in extracted rectangle
+
+ hsv = cv2.cvtColor(rgbs[i][rowmin:rowmax, colmin:colmax].copy(),cv2.COLOR_BGR2HSV)
+ h=hsv[:,:,0]
+ s=hsv[:,:,1]
+ v=hsv[:,:,2]
+
+ color_mask = ((h<20) | (h>150)) & (s > 100) & (v > 90)
+ height_mask = z_rectangle > FOAM_HEIGHT + .025
+
+ total_mask = color_mask & height_mask
+ print "%ith image has %i valid points (above foam and red)"%(i, total_mask.sum())
+ valid_pts += total_mask.sum()
+
+ high_red_xyz.extend(xyz_tfs[i][rowmin:rowmax, colmin:colmax, :][total_mask])
+
+ xyz_avg = np.median(high_red_xyz,axis=0)
+
+ from jds_image_proc.pcl_utils import xyz2uv
+ row, col = xyz2uv(xyz_avg).astype('int')[0]
+
+ cv2.circle(rgb_plot, (row, col), 3, (255, 0, 0), 2)
+ cv2.imshow(WIN_NAME, rgb_plot)
+ cv2.waitKey(100)
+
+ print 'Needle end location', xyz_avg
+
+ return xyz_avg, valid_pts
\ No newline at end of file
diff --git a/iros/suture_demos.yaml b/iros/suture_demos.yaml
new file mode 100644
index 0000000..874abda
--- /dev/null
+++ b/iros/suture_demos.yaml
@@ -0,0 +1,246 @@
+InterruptedSuture6:
+ - part_name: pierce
+ demo_bag: /mnt/storage/mtayfred/data/bags/pierce_w_startstopseg6.bag
+ segments:
+ - description: grab needle from stand
+ - description: look at needle tip
+ extra_info: [left_grab]
+ - description: grab flap
+ - description: pierce
+ left_end_effector: needle_tip
+ - description: push needle through
+ - description: grab needle with right hand
+
+ - part_name: two_loop_knot
+
+#includes look time for after needle grab with right hand
+InterruptedSuture7:
+ - part_name: pierce
+ demo_bag: /mnt/storage/mtayfred/data/bags/pierce_w_startstopseg7.bag
+ segments:
+ - description: grab needle from stand
+ - description: look at needle tip
+ extra_info: [left_grab]
+ - description: grab flap
+ - description: pierce
+ left_end_effector: needle_tip
+ - description: push needle through
+ - description: grab needle with right hand
+ - description: pass needle back to left hand
+ - description: pull thread
+ - description: pull thread, put needle back in stand
+
+ - part_name: two_loop_knot
+
+#includes additional look time for after passing the needle back to the left hand
+InterruptedSuture8:
+ - part_name: pierce
+ demo_bag: /mnt/storage/mtayfred/data/bags/pierce_w_startstopseg8.bag
+ segments:
+ - description: grab needle from stand
+ - description: look at needle tip
+ extra_info: [left_grab]
+ - description: grab flap
+ - description: pierce
+ left_end_effector: needle_tip
+ - description: push needle through
+ - description: grab needle with right hand
+ - description: pass needle back to left hand
+ - description: look at needle tip
+ - description: pull thread
+ - description: pull thread, put needle back in stand
+ left_end_effector: needle_tip
+ extra_info: [left_grab]
+
+ - part_name: two_loop_knot
+
+#combination of 7 and 8 -- doesn't have its own bag file
+InterruptedSuture9:
+ - part_name: pierce
+ demo_bag:
+ segments:
+ # seg0
+ - description: grab needle from stand
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [l]
+ # seg1
+ - description: look at needle tip
+ mini-segments:
+ - arms_used: [l]
+ extra_info: [left_grab]
+ # seg2
+ - description: grab flap
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ # seg3
+ - description: pierce
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [r]
+ left_end_effector: needle_tip
+ # seg4
+ - description: push needle through
+ mini-segments:
+ - arms_used: [l,r]
+ # seg5
+ - description: grab needle with right hand
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [l,r]
+ # seg6
+ - description: pass needle back to left hand
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [l]
+ - arms_used: [l,r]
+ # seg7
+ - description: look at needle tip
+ mini-segments:
+ - arms_used: [l]
+ # seg8
+ - description: pull thread
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [r]
+ # seg9
+ - description: pull thread, put needle back in stand
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [l]
+ left_end_effector: needle_tip
+ extra_info: [left_grab]
+
+ - part_name: two_loop_knot
+
+InterruptedSuture10:
+ - part_name: pierce
+ demo_bag: /mnt/storage/mtayfred/data/bags/pierce_w_startstopseg10.bag
+ segments:
+ # seg0
+ - description: grab needle from stand
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [l]
+ # seg1
+ - description: look at needle tip
+ mini-segments:
+ - arms_used: [l]
+ extra_info: [left_grab]
+ # seg2
+ - description: grab flap
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ # seg3
+ - description: pierce
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [r]
+ left_end_effector: needle_tip
+ # seg4
+ - description: push needle through
+ mini-segments:
+ - arms_used: [l,r]
+ # seg5
+ - description: grab needle with right hand
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [l,r]
+ # seg6
+ - description: pass needle back to left hand
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [l]
+ - arms_used: [l,r]
+ # seg7
+ - description: look at needle tip
+ mini-segments:
+ - arms_used: [l]
+ # seg8
+ - description: pull thread
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [r]
+ # seg9
+ - description: pull thread, put needle back in stand
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [l]
+ left_end_effector: needle_tip
+ extra_info: [left_grab]
+
+ - part_name: two_loop_knot
+
+InterruptedSuture11:
+ - part_name: pierce
+ demo_bag: /mnt/storage/mtayfred/data/bags/pierce_w_startstopseg11.bag
+ segments:
+ # seg0
+ - description: grab needle from stand
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [l]
+ # seg1
+ - description: look at needle tip
+ mini-segments:
+ - arms_used: [l]
+ extra_info: [left_grab]
+ # seg2
+ - description: grab flap
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ # seg3
+ - description: pierce
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [r]
+ left_end_effector: needle_tip
+ # seg4
+ - description: push needle through
+ mini-segments:
+ - arms_used: [l,r]
+ # seg5
+ - description: grab needle with right hand
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [l,r]
+ # seg6
+ - description: pass needle back to left hand
+ mini-segments:
+ - arms_used: [l]
+ - arms_used: [l]
+ - arms_used: [l,r]
+ # seg7
+ - description: look at needle tip
+ mini-segments:
+ - arms_used: [l]
+ extra_info: [left_grab]
+ # seg8
+ - description: pull thread
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [r]
+ # seg9
+ - description: pull thread, put needle back in stand
+ mini-segments:
+ - arms_used: [r]
+ - arms_used: [r]
+ - arms_used: [r,l]
+ - arms_used: [l]
+ left_end_effector: needle_tip
+
+
+ - part_name: two_loop_knot
\ No newline at end of file
diff --git a/iros/test_tps_fitting.py b/iros/test_tps_fitting.py
new file mode 100644
index 0000000..f939d67
--- /dev/null
+++ b/iros/test_tps_fitting.py
@@ -0,0 +1,61 @@
+import numpy as np
+import os.path as osp
+import os
+from glob import glob
+import lfd.tps as tps
+from jds_utils.colorize import colorize
+from lfd import fastmath
+testdata_dir = osp.join(os.getenv("IROS_DATA_DIR"), "testdata")
+fnames = glob(osp.join(testdata_dir,"segment*.npz"))
+np.set_printoptions(precision=3)
+
+PLOT = False
+
+def axisanglepart(m):
+ if np.linalg.det(m) < 0 or np.isnan(m).any(): raise Exception
+ u,s,vh = np.linalg.svd(m)
+ rotpart = u.dot(vh)
+ theta = np.arccos((np.trace(rotpart) - 1)/2)
+ axis = (1/(2*np.sin(theta))) * np.array([[rotpart[2,1] - rotpart[1,2], rotpart[0,2]-rotpart[2,0], rotpart[1,0]-rotpart[0,1]]])
+ return theta, axis
+
+
+for (i,fname) in enumerate(fnames):
+ f = np.load(fname)
+ (current_xyz, current_rgb, current_keypts,demo_xyz, demo_rgb, demo_keypts) = \
+ [f[key] for key in ["current_xyz", "current_rgb", "exec_keypts", "demo_xyz", "demo_rgb", "demo_keypts"]]
+
+
+
+ rot_coefs = .01*np.array((0.01,0.01,0.0025))
+ scale_coef= .01*.01
+ fastmath.set_coeffs(rot_coefs, scale_coef)
+ rfunc = fastmath.rot_reg
+ lin_ag, trans_g, w_ng = tps.tps_fit_regrot(demo_keypts, current_keypts, bend_coef = 1, rfunc=rfunc)
+ #spline.fit(demo_keypts, current_keypts,bend_coef=10,rot_coef=.1)
+ print "-----------------------------"
+ print "dataset %i"%i
+ centertrans_g = tps.tps_eval([demo_keypts.mean(axis=0)], lin_ag, trans_g, w_ng, demo_keypts)[0]-demo_keypts.mean(axis=0)
+ print "translation of center:\n", centertrans_g
+ print "linear:\n",lin_ag
+ print "axis-angle", axisanglepart(lin_ag.T)
+ print "singular vals", np.linalg.svd(lin_ag.T)[1]
+ print "nonlinear:\n",w_ng
+ print "residuals:\n",tps.tps_eval(demo_keypts, lin_ag, trans_g, w_ng, demo_keypts)-current_keypts
+ max_residual = (tps.tps_eval(demo_keypts, lin_ag, trans_g, w_ng, demo_keypts)-current_keypts).max()
+ print "max residual:%.3f"%max_residual
+ if max_residual > .015:
+ print colorize("warning: large residual (%.3f)"%max_residual,'red',highlight=True)
+ if np.linalg.norm(centertrans_g) > .4:
+ print colorize("warning: large translation (%.3f)"%np.linalg.norm(centertrans_g),'red',highlight=True)
+
+ if PLOT:
+ import mayavi.mlab as mlab
+ mlab.clf()
+ x,y,z = demo_keypts.T
+ mlab.points3d(x,y,z,color=(0,1,0))
+ x,y,z = current_keypts.T
+ mlab.points3d(x,y,z,color=(1,0,0))
+ for (demopt, curpt) in zip(demo_keypts, current_keypts):
+ mlab.plot3d([demopt[0],curpt[0]], [demopt[1],curpt[1]], [demopt[2], curpt[2]],color=(1,1,0),tube_radius=.001)
+ mlab.show()
diff --git a/jds_utils/shortest_paths.py b/jds_utils/shortest_paths.py
new file mode 100644
index 0000000..53dc096
--- /dev/null
+++ b/jds_utils/shortest_paths.py
@@ -0,0 +1,34 @@
+import numpy as np
+
+def shortest_paths(ncost_nk,ecost_nkk):
+ """
+ Find minimum cost paths through graph (one path for each end point)
+ where all nodes in nth row are connected to all nodes in (n+1)st row
+ ncost_nk: node costs
+ ecost_nkk: edge costs
+
+ returns: (paths, costs) where
+ paths is a K x N array of integers. Each row gives the minimum-cost path
+ to its final node.
+ costs has length K and gives the cost of each path
+ """
+ N = len(ncost_nk)
+ assert len(ecost_nkk) == N-1
+
+ # Viterbi-style forward pass: cost_nk[n][k] is the cheapest cost of any
+ # path ending at node k of row n; prev_nk[n-1][k] remembers which node in
+ # row n-1 achieved it.
+ cost_nk = [None for _ in xrange(N)]
+ prev_nk = [None for _ in xrange(N-1)]
+ cost_nk[0] = ncost_nk[0]
+ for n in xrange(1,N):
+ # cost_kk[j,k]: best cost through node j of row n-1 then node k of row n
+ cost_kk = ecost_nkk[n-1] + cost_nk[n-1][:,None] + ncost_nk[n][None,:]
+ cost_nk[n] = cost_kk.min(axis=0)
+ prev_nk[n-1] = cost_kk.argmin(axis=0)
+
+
+ path_costs = cost_nk[N-1]
+ paths = [None for _ in xrange(N)]
+
+ # backtrack from every final node at once (vectorized over the K columns)
+ paths[N-1] = np.arange(len(ncost_nk[-1]))
+ for n in xrange(N-1,0,-1):
+ paths[n-1] = prev_nk[n-1][paths[n]]
+
+ return np.array(paths).T, path_costs
\ No newline at end of file
diff --git a/joint_states_listener/CMakeLists.txt b/joint_states_listener/CMakeLists.txt
new file mode 100644
index 0000000..853165b
--- /dev/null
+++ b/joint_states_listener/CMakeLists.txt
@@ -0,0 +1,30 @@
+cmake_minimum_required(VERSION 2.4.6)
+include($ENV{ROS_ROOT}/core/rosbuild/rosbuild.cmake)
+
+# Set the build type. Options are:
+# Coverage : w/ debug symbols, w/o optimization, w/ code-coverage
+# Debug : w/ debug symbols, w/o optimization
+# Release : w/o debug symbols, w/ optimization
+# RelWithDebInfo : w/ debug symbols, w/ optimization
+# MinSizeRel : w/o debug symbols, w/ optimization, stripped binaries
+#set(ROS_BUILD_TYPE RelWithDebInfo)
+
+rosbuild_init()
+
+#set the default path for built executables to the "bin" directory
+set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/bin)
+#set the default path for built libraries to the "lib" directory
+set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib)
+
+#uncomment if you have defined messages
+#rosbuild_genmsg()
+#uncomment if you have defined services
+rosbuild_gensrv()
+
+#common commands for building c++ executables and libraries
+#rosbuild_add_library(${PROJECT_NAME} src/example.cpp)
+#target_link_libraries(${PROJECT_NAME} another_library)
+#rosbuild_add_boost_directories()
+#rosbuild_link_boost(${PROJECT_NAME} thread)
+#rosbuild_add_executable(example examples/example.cpp)
+#target_link_libraries(example ${PROJECT_NAME})
diff --git a/joint_states_listener/Makefile b/joint_states_listener/Makefile
new file mode 100644
index 0000000..b75b928
--- /dev/null
+++ b/joint_states_listener/Makefile
@@ -0,0 +1 @@
+include $(shell rospack find mk)/cmake.mk
\ No newline at end of file
diff --git a/joint_states_listener/mainpage.dox b/joint_states_listener/mainpage.dox
new file mode 100644
index 0000000..7198267
--- /dev/null
+++ b/joint_states_listener/mainpage.dox
@@ -0,0 +1,14 @@
+/**
+\mainpage
+\htmlinclude manifest.html
+
+\b joint_states_listener
+
+<!--
+Provide an overview of your package.
+-->
+
+*/
diff --git a/joint_states_listener/manifest.xml b/joint_states_listener/manifest.xml
new file mode 100644
index 0000000..d4b42f1
--- /dev/null
+++ b/joint_states_listener/manifest.xml
@@ -0,0 +1,14 @@
+<package>
+  <!-- NOTE(review): XML tags were stripped in this chunk; structure
+       reconstructed from the standard rosbuild manifest template.
+       Verify the depend list (the node imports rospy and sensor_msgs). -->
+  <description brief="joint_states_listener">
+     joint_states_listener
+  </description>
+  <author>Mallory</author>
+  <license>BSD</license>
+  <review status="unreviewed" notes=""/>
+  <url>http://ros.org/wiki/joint_states_listener</url>
+  <depend package="rospy"/>
+  <depend package="sensor_msgs"/>
+</package>
+
new file mode 100755
index 0000000..265330d
--- /dev/null
+++ b/joint_states_listener/nodes/joint_states_listener.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+#spins off a thread to listen for joint_states messages
+#and provides the same information (or subsets of) as a service
+
+import roslib
+roslib.load_manifest('joint_states_listener')
+import rospy
+from joint_states_listener.srv import *
+from sensor_msgs.msg import JointState
+import threading
+
+
+#holds the latest states obtained from joint_states messages
+class LatestJointStates:
+
+ def __init__(self):
+ rospy.init_node('joint_states_listener')
+ self.lock = threading.Lock()
+ self.name = []
+ self.position = []
+ self.velocity = []
+ self.effort = []
+ self.thread = threading.Thread(target=self.joint_states_listener)
+ self.thread.start()
+
+ s = rospy.Service('return_joint_states', ReturnJointStates, self.return_joint_states)
+
+
+ #thread function: listen for joint_states messages
+ def joint_states_listener(self):
+ rospy.Subscriber('joint_states', JointState, self.joint_states_callback)
+ rospy.spin()
+
+
+ #callback function: when a joint_states message arrives, save the values
+ def joint_states_callback(self, msg):
+ self.lock.acquire()
+ self.name = msg.name
+ self.position = msg.position
+ self.velocity = msg.velocity
+ self.effort = msg.effort
+ self.lock.release()
+
+
+ #returns (found, position, velocity, effort) for the joint joint_name
+ #(found is 1 if found, 0 otherwise)
+ def return_joint_state(self, joint_name):
+
+ #no messages yet
+ if self.name == []:
+ rospy.logerr("No robot_state messages received!\n")
+ return (0, 0., 0., 0.)
+
+ #return info for this joint
+ self.lock.acquire()
+ if joint_name in self.name:
+ index = self.name.index(joint_name)
+ position = self.position[index]
+ velocity = self.velocity[index]
+ effort = self.effort[index]
+
+ #unless it's not found
+ else:
+ rospy.logerr("Joint %s not found!", (joint_name,))
+ self.lock.release()
+ return (0, 0., 0., 0.)
+ self.lock.release()
+ return (1, position, velocity, effort)
+
+
+ #server callback: returns arrays of position, velocity, and effort
+ #for a list of joints specified by name
+ def return_joint_states(self, req):
+ joints_found = []
+ positions = []
+ velocities = []
+ efforts = []
+ for joint_name in req.name:
+ (found, position, velocity, effort) = self.return_joint_state(joint_name)
+ joints_found.append(found)
+ positions.append(position)
+ velocities.append(velocity)
+ efforts.append(effort)
+ return ReturnJointStatesResponse(joints_found, positions, velocities, efforts)
+
+
+#run the server
+if __name__ == "__main__":
+
+ latestjointstates = LatestJointStates()
+
+ # keep the main thread alive; the listener thread and service do the work
+ print "joints_states_listener server started, waiting for queries"
+ rospy.spin()
diff --git a/joint_states_listener/nodes/joint_states_trajectory_recorder.py b/joint_states_listener/nodes/joint_states_trajectory_recorder.py
new file mode 100755
index 0000000..6a8fc6d
--- /dev/null
+++ b/joint_states_listener/nodes/joint_states_trajectory_recorder.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+#test client for joint_states_listener
+
+import roslib
+roslib.load_manifest('joint_states_listener')
+import rospy
+import rosbag
+from joint_states_listener.srv import ReturnJointStates
+import sys
+import numpy as np
+
+def call_return_joint_states(joint_names):
+ """Query the return_joint_states service for the named joints.
+
+ Returns (position, velocity, effort) sequences, one entry per joint.
+ Exits the whole process if the service call itself fails; joints the
+ server could not find are only reported with a print (their slots still
+ hold the server's placeholder values).
+ """
+ rospy.wait_for_service("return_joint_states")
+ try:
+ s = rospy.ServiceProxy("return_joint_states", ReturnJointStates)
+ resp = s(joint_names)
+ except rospy.ServiceException, e:
+ print "error when calling return_joint_states: %s"%e
+ sys.exit(1)
+ for (ind, joint_name) in enumerate(joint_names):
+ if(not resp.found[ind]):
+ print "joint %s not found!"%joint_name
+ return (resp.position, resp.velocity, resp.effort)
+
+
+## Pretty-print list to string
+def pplist(list):
+ return ' '.join(['%2.3f'%x for x in list])
+
+
+## Record arm and gripper trajectories for a fixed duration, then save them.
+if __name__ == "__main__":
+ rospy.init_node('joint_states_trajectory_recorder')
+
+ # TODO(review): hard-coded home directory; parameterize before reuse
+ traj_file_path = '/home/mallory/fuerte_workspace/sandbox/suturing/pr2_suturing/joint_states_listener/trajectories'
+
+ record_time = rospy.Duration.from_sec(300.0) #pt 1, 2
+ #record_time = rospy.Duration.from_sec(240.0) #pt 3, 4, 5
+
+ traj_larm = []
+ traj_rarm = []
+ traj_lgrip = []
+ traj_rgrip = []
+
+ # 7 left-arm joints, 7 right-arm joints, then the two gripper joints;
+ # the slices below (0:7, 7:14, 14, 15) depend on this ordering
+ joint_names = ["l_shoulder_pan_joint",
+ "l_shoulder_lift_joint",
+ "l_upper_arm_roll_joint",
+ "l_elbow_flex_joint",
+ "l_forearm_roll_joint",
+ "l_wrist_flex_joint",
+ "l_wrist_roll_joint",
+ "r_shoulder_pan_joint",
+ "r_shoulder_lift_joint",
+ "r_upper_arm_roll_joint",
+ "r_elbow_flex_joint",
+ "r_forearm_roll_joint",
+ "r_wrist_flex_joint",
+ "r_wrist_roll_joint",
+ "l_gripper_joint",
+ "r_gripper_joint"]
+
+ # busy-wait until the ROS clock starts publishing nonzero time
+ while(1):
+ if rospy.Time.now().secs > 0:
+ break
+ else:
+ continue
+
+ print 'record_time:', record_time.secs
+ start_time = rospy.Time.now().secs
+
+ while((rospy.Time.now().secs - start_time) < record_time.secs):
+
+ # NOTE(review): this prints on every loop iteration that lands inside a
+ # matching second, not once per 5 s
+ if (rospy.Time.now().secs - start_time) % 5 == 0:
+ print 'time left:', record_time.secs - (rospy.Time.now().secs - start_time)
+
+ (position, velocity, effort) = call_return_joint_states(joint_names)
+
+ #print "position:", pplist(position)
+ #print "velocity:", pplist(velocity)
+ #print "effort: ", pplist(effort)
+
+ traj_larm.append(position[0:7])
+ traj_rarm.append(position[7:14])
+ traj_lgrip.append(position[14])
+ traj_rgrip.append(position[15])
+
+ print 'end_time:', rospy.Time.now().secs
+ print 'finished recording'
+
+ ## Save suture trajectories to file
+ np.save(traj_file_path + '/full_running_suture/pt1_tj_larm.npy', np.array(traj_larm))
+ np.save(traj_file_path + '/full_running_suture/pt1_tj_rarm.npy', np.array(traj_rarm))
+ np.save(traj_file_path + '/full_running_suture/pt1_tj_lgrip.npy', np.array(traj_lgrip))
+ np.save(traj_file_path + '/full_running_suture/pt1_tj_rgrip.npy', np.array(traj_rgrip))
+
+## running suture:
+## height: 3rd bolt from the bottom on the side of the bot is level with bottom of foam
+## pt1: pierce both flaps, pull twice with right hand, pass needle to right hand, pull with left hand until through,
+## put needle in stand, end in 'side'
+## pt2: left 2 loop knot (needle not perfectly in stand at end)
+## pt3: right 1 loop knot
+## pt4: left 1 loop knot
+## pt5: right 1 loop knot
+## pt6: needle through both flaps, pull twice with right hand, pass needle to right hand, pull twice with left hand,
+## put needle in stand, end in 'side'
+## ...
+
+
+
diff --git a/joint_states_listener/nodes/joint_states_trajectory_recorder2.py b/joint_states_listener/nodes/joint_states_trajectory_recorder2.py
new file mode 100755
index 0000000..9ad4b5a
--- /dev/null
+++ b/joint_states_listener/nodes/joint_states_trajectory_recorder2.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+#test client for joint_states_listener
+
+import roslib
+roslib.load_manifest('joint_states_listener')
+import rospy
+import rosbag
+from joint_states_listener.srv import ReturnJointStates
+import sys
+import numpy as np
+import threading as th
+
+
+def call_return_joint_states(joint_names):
+ """Query the return_joint_states service for the named joints.
+
+ Returns (position, velocity, effort) sequences, one entry per joint.
+ Exits the process if the service call fails.
+ NOTE(review): duplicates the helper in joint_states_trajectory_recorder.py;
+ consider moving it to a shared module.
+ """
+ rospy.wait_for_service("return_joint_states")
+ try:
+ s = rospy.ServiceProxy("return_joint_states", ReturnJointStates)
+ resp = s(joint_names)
+ except rospy.ServiceException, e:
+ print "error when calling return_joint_states: %s"%e
+ sys.exit(1)
+ for (ind, joint_name) in enumerate(joint_names):
+ if(not resp.found[ind]):
+ print "joint %s not found!"%joint_name
+ return (resp.position, resp.velocity, resp.effort)
+
+
+## Pretty-print list to string
+def pplist(list):
+ return ' '.join(['%2.3f'%x for x in list])
+
+
+## Class to signal when to stop recording
+class AskDone(th.Thread):
+
+ def __init__(self):
+ th.Thread.__init__(self)
+ self.done = False
+
+ def run(self):
+ y = raw_input('Press any key to stop recording')
+ self.done = True
+
+
+## Record arm and gripper trajectories until the user presses enter,
+## then save them to .npy files.
+if __name__ == "__main__":
+ rospy.init_node('joint_states_trajectory_recorder')
+
+ # TODO(review): hard-coded home directory; parameterize before reuse
+ traj_file_path = '/home/mallory/fuerte_workspace/sandbox/suturing/pr2_suturing/joint_states_listener/trajectories'
+ traj_name = '/full_interrupted_suture/pt12'
+
+ traj_larm = []
+ traj_rarm = []
+ traj_lgrip = []
+ traj_rgrip = []
+
+ # 7 left-arm joints, 7 right-arm joints, then the two gripper joints;
+ # the slices below (0:7, 7:14, 14, 15) depend on this ordering
+ joint_names = ["l_shoulder_pan_joint",
+ "l_shoulder_lift_joint",
+ "l_upper_arm_roll_joint",
+ "l_elbow_flex_joint",
+ "l_forearm_roll_joint",
+ "l_wrist_flex_joint",
+ "l_wrist_roll_joint",
+ "r_shoulder_pan_joint",
+ "r_shoulder_lift_joint",
+ "r_upper_arm_roll_joint",
+ "r_elbow_flex_joint",
+ "r_forearm_roll_joint",
+ "r_wrist_flex_joint",
+ "r_wrist_roll_joint",
+ "l_gripper_joint",
+ "r_gripper_joint"]
+
+ # busy-wait until the ROS clock starts publishing nonzero time
+ while(1):
+ if rospy.Time.now().secs > 0:
+ break
+ else:
+ continue
+
+ start_time = rospy.Time.now().secs
+ print 'Start Time:', start_time
+
+ # background thread waits for the user to press enter
+ print 'Recording ...'
+ ask_done = AskDone()
+ ask_done.start()
+
+ # sample joint states as fast as the service answers until signalled
+ while (not ask_done.done):
+
+ (position, velocity, effort) = call_return_joint_states(joint_names)
+
+ traj_larm.append(position[0:7])
+ traj_rarm.append(position[7:14])
+ traj_lgrip.append(position[14])
+ traj_rgrip.append(position[15])
+
+ print 'End Time:', rospy.Time.now().secs
+ print 'Recording Duration:', rospy.Time.now().secs - start_time
+
+ ask_done.join()
+
+ print 'Finished recording'
+
+ ## Save trajectories to file
+ np.save(traj_file_path + traj_name + '_tj_larm.npy' , np.array(traj_larm))
+ np.save(traj_file_path + traj_name + '_tj_rarm.npy' , np.array(traj_rarm))
+ np.save(traj_file_path + traj_name + '_tj_lgrip.npy', np.array(traj_lgrip))
+ np.save(traj_file_path + traj_name + '_tj_rgrip.npy', np.array(traj_rgrip))
+
+ print 'Trajectories saved to file'
+
+
diff --git a/joint_states_listener/src/joint_states_listener/__init__.py b/joint_states_listener/src/joint_states_listener/__init__.py
new file mode 100644
index 0000000..b23e43d
--- /dev/null
+++ b/joint_states_listener/src/joint_states_listener/__init__.py
@@ -0,0 +1 @@
+#autogenerated by ROS python message generators
\ No newline at end of file
diff --git a/joint_states_listener/src/joint_states_listener/srv/_ReturnJointStates.py b/joint_states_listener/src/joint_states_listener/srv/_ReturnJointStates.py
new file mode 100644
index 0000000..51325ff
--- /dev/null
+++ b/joint_states_listener/src/joint_states_listener/srv/_ReturnJointStates.py
@@ -0,0 +1,330 @@
+"""autogenerated by genpy from joint_states_listener/ReturnJointStatesRequest.msg. Do not edit."""
+import sys
+python3 = True if sys.hexversion > 0x03000000 else False
+import genpy
+import struct
+
+
+class ReturnJointStatesRequest(genpy.Message):
+ _md5sum = "3f2d21c30868b92dc41a0431bacd47b2"
+ _type = "joint_states_listener/ReturnJointStatesRequest"
+ _has_header = False #flag to mark the presence of a Header object
+ _full_text = """string[] name
+
+"""
+ __slots__ = ['name']
+ _slot_types = ['string[]']
+
+ def __init__(self, *args, **kwds):
+ """
+ Constructor. Any message fields that are implicitly/explicitly
+ set to None will be assigned a default value. The recommend
+ use is keyword arguments as this is more robust to future message
+ changes. You cannot mix in-order arguments and keyword arguments.
+
+ The available fields are:
+ name
+
+ :param args: complete set of field values, in .msg order
+ :param kwds: use keyword arguments corresponding to message field names
+ to set specific fields.
+ """
+ if args or kwds:
+ super(ReturnJointStatesRequest, self).__init__(*args, **kwds)
+ #message fields cannot be None, assign default values for those that are
+ if self.name is None:
+ self.name = []
+ else:
+ self.name = []
+
+ def _get_types(self):
+ """
+ internal API method
+ """
+ return self._slot_types
+
+ def serialize(self, buff):
+ """
+ serialize message into buffer
+ :param buff: buffer, ``StringIO``
+ """
+ try:
+ length = len(self.name)
+ buff.write(_struct_I.pack(length))
+ for val1 in self.name:
+ length = len(val1)
+ if python3 or type(val1) == unicode:
+ val1 = val1.encode('utf-8')
+ length = len(val1)
+ buff.write(struct.pack(' 0x03000000 else False
+import genpy
+import struct
+
+
+class ReturnJointStatesResponse(genpy.Message):
+ _md5sum = "3a36649f5b1439b638a41d18af93e9a4"
+ _type = "joint_states_listener/ReturnJointStatesResponse"
+ _has_header = False #flag to mark the presence of a Header object
+ _full_text = """uint32[] found
+float64[] position
+float64[] velocity
+float64[] effort
+
+
+"""
+ __slots__ = ['found','position','velocity','effort']
+ _slot_types = ['uint32[]','float64[]','float64[]','float64[]']
+
+ def __init__(self, *args, **kwds):
+ """
+ Constructor. Any message fields that are implicitly/explicitly
+ set to None will be assigned a default value. The recommend
+ use is keyword arguments as this is more robust to future message
+ changes. You cannot mix in-order arguments and keyword arguments.
+
+ The available fields are:
+ found,position,velocity,effort
+
+ :param args: complete set of field values, in .msg order
+ :param kwds: use keyword arguments corresponding to message field names
+ to set specific fields.
+ """
+ if args or kwds:
+ super(ReturnJointStatesResponse, self).__init__(*args, **kwds)
+ #message fields cannot be None, assign default values for those that are
+ if self.found is None:
+ self.found = []
+ if self.position is None:
+ self.position = []
+ if self.velocity is None:
+ self.velocity = []
+ if self.effort is None:
+ self.effort = []
+ else:
+ self.found = []
+ self.position = []
+ self.velocity = []
+ self.effort = []
+
+ def _get_types(self):
+ """
+ internal API method
+ """
+ return self._slot_types
+
+ def serialize(self, buff):
+ """
+ serialize message into buffer
+ :param buff: buffer, ``StringIO``
+ """
+ try:
+ length = len(self.found)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sI'%length
+ buff.write(struct.pack(pattern, *self.found))
+ length = len(self.position)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sd'%length
+ buff.write(struct.pack(pattern, *self.position))
+ length = len(self.velocity)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sd'%length
+ buff.write(struct.pack(pattern, *self.velocity))
+ length = len(self.effort)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sd'%length
+ buff.write(struct.pack(pattern, *self.effort))
+ except struct.error as se: self._check_types(se)
+ except TypeError as te: self._check_types(te)
+
+ def deserialize(self, str):
+ """
+ unpack serialized message in str into this message instance
+ :param str: byte array of serialized message, ``str``
+ """
+ try:
+ end = 0
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sI'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.found = struct.unpack(pattern, str[start:end])
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sd'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.position = struct.unpack(pattern, str[start:end])
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sd'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.velocity = struct.unpack(pattern, str[start:end])
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sd'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.effort = struct.unpack(pattern, str[start:end])
+ return self
+ except struct.error as e:
+ raise genpy.DeserializationError(e) #most likely buffer underfill
+
+
+ def serialize_numpy(self, buff, numpy):
+ """
+ serialize message with numpy array types into buffer
+ :param buff: buffer, ``StringIO``
+ :param numpy: numpy python module
+ """
+ try:
+ length = len(self.found)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sI'%length
+ buff.write(self.found.tostring())
+ length = len(self.position)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sd'%length
+ buff.write(self.position.tostring())
+ length = len(self.velocity)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sd'%length
+ buff.write(self.velocity.tostring())
+ length = len(self.effort)
+ buff.write(_struct_I.pack(length))
+ pattern = '<%sd'%length
+ buff.write(self.effort.tostring())
+ except struct.error as se: self._check_types(se)
+ except TypeError as te: self._check_types(te)
+
+ def deserialize_numpy(self, str, numpy):
+ """
+ unpack serialized message in str into this message instance using numpy for array types
+ :param str: byte array of serialized message, ``str``
+ :param numpy: numpy python module
+ """
+ try:
+ end = 0
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sI'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.found = numpy.frombuffer(str[start:end], dtype=numpy.uint32, count=length)
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sd'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sd'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
+ start = end
+ end += 4
+ (length,) = _struct_I.unpack(str[start:end])
+ pattern = '<%sd'%length
+ start = end
+ end += struct.calcsize(pattern)
+ self.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
+ return self
+ except struct.error as e:
+ raise genpy.DeserializationError(e) #most likely buffer underfill
+
+_struct_I = genpy.struct_I
+class ReturnJointStates(object):
+ _type = 'joint_states_listener/ReturnJointStates'
+ _md5sum = 'ce9bd2b56c904b190a782a08482fb4e9'
+ _request_class = ReturnJointStatesRequest
+ _response_class = ReturnJointStatesResponse
diff --git a/joint_states_listener/src/joint_states_listener/srv/__init__.py b/joint_states_listener/src/joint_states_listener/srv/__init__.py
new file mode 100644
index 0000000..93a9d72
--- /dev/null
+++ b/joint_states_listener/src/joint_states_listener/srv/__init__.py
@@ -0,0 +1 @@
+from ._ReturnJointStates import *
diff --git a/joint_states_listener/srv/ReturnJointStates.srv b/joint_states_listener/srv/ReturnJointStates.srv
new file mode 100644
index 0000000..b827353
--- /dev/null
+++ b/joint_states_listener/srv/ReturnJointStates.srv
@@ -0,0 +1,6 @@
+string[] name
+---
+uint32[] found
+float64[] position
+float64[] velocity
+float64[] effort
diff --git a/joint_states_listener/srv_gen/cpp/include/joint_states_listener/ReturnJointStates.h b/joint_states_listener/srv_gen/cpp/include/joint_states_listener/ReturnJointStates.h
new file mode 100644
index 0000000..1d32d3d
--- /dev/null
+++ b/joint_states_listener/srv_gen/cpp/include/joint_states_listener/ReturnJointStates.h
@@ -0,0 +1,304 @@
+/* Auto-generated by genmsg_cpp for file /home/mallory/fuerte_workspace/sandbox/suturing/python/joint_states_listener/srv/ReturnJointStates.srv */
+#ifndef JOINT_STATES_LISTENER_SERVICE_RETURNJOINTSTATES_H
+#define JOINT_STATES_LISTENER_SERVICE_RETURNJOINTSTATES_H
+#include
+#include
+#include