code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
#!/usr/bin/python
import roslib
roslib.load_manifest("hrl_pr2_arms")
roslib.load_manifest("hrl_generic_arms")
roslib.load_manifest("hrl_lib")
roslib.load_manifest("hrl_msgs")
roslib.load_manifest("ar_pose")
import math, time, copy
import numpy as np
import tf, rospy, actionlib
import hrl_lib.transforms as hrl_tr
import tf.transformations as tf_trans
import hrl_pr2_arms.pr2_controller_switcher as pr2cs
import hrl_pr2_arms.pr2_arm as pr2arm
import hrl_generic_arms.ep_trajectory_controller as eptc
from actionlib_msgs.msg import *
from pr2_controllers_msgs.msg import *
from ar_pose.msg import ARMarkers
from std_msgs.msg import String, Bool
from hrl_msgs.msg import StringArray
class head():
def __init__(self):
# rospy.init_node('move_the_head', anonymous=True)
self.client = actionlib.SimpleActionClient('/head_traj_controller/point_head_action',
PointHeadAction)
self.client.wait_for_server()
self.goal = PointHeadGoal()
def set_pose(self, pos):
self.goal.target.header.frame_id = 'torso_lift_link'
self.goal.target.point.x = pos[0]
self.goal.target.point.y = pos[1]
self.goal.target.point.z = pos[2]
self.goal.min_duration = rospy.Duration(3.)
rospy.logout('Sending Head Goal')
self.client.send_goal(self.goal)
self.client.wait_for_result()
if self.client.get_state() == GoalStatus.SUCCEEDED:
print "Succeeded"
else:
print "Failed"
class torso():
def __init__(self):
self.client = actionlib.SimpleActionClient('/torso_controller/position_joint_action',
SingleJointPositionAction)
self.client.wait_for_server()
self.pos = SingleJointPositionGoal()
def down(self):
# self.pos.position = 0.01
self.pos.position = 0.15
self.pos.min_duration = rospy.Duration(2.)
self.pos.max_velocity = 1.
rospy.logout('Sending torso down')
self.client.send_goal(self.pos)
self.client.wait_for_result()
if self.client.get_state() == GoalStatus.SUCCEEDED:
print "Succeeded"
else:
print "Failed"
class gripper():
def __init__(self):
self.r_client = actionlib.SimpleActionClient('r_gripper_controller/gripper_action',
Pr2GripperCommandAction)
self.r_client.wait_for_server()
self.l_client = actionlib.SimpleActionClient('l_gripper_controller/gripper_action',
Pr2GripperCommandAction)
self.r_client.wait_for_server()
self.state = Pr2GripperCommandGoal()
def Open(self, arm='left_arm'):
if arm == 'left_arm':
client = self.l_client
elif arm == 'right_arm':
client = self.r_client
self.state.command.position = .07
self.state.command.max_effort = -1.
rospy.logout('Open the gripper')
client.send_goal(self.state)
client.wait_for_result()
if client.get_state() == GoalStatus.SUCCEEDED:
print "Succeeded"
else:
print "Failed"
def Close(self, arm='left_arm'):
if arm == 'left_arm':
client = self.l_client
elif arm == 'right_arm':
client = self.r_client
self.state.command.position = 0.
self.state.command.max_effort = 50.
rospy.logout('Close the gripper')
client.send_goal(self.state)
client.wait_for_result()
if client.get_state() == GoalStatus.SUCCEEDED:
print "Succeeded"
else:
print "Failed"
class ar_manipulation():
    """Fetch and return AR-tagged tools ('shaver', 'scratcher') with the PR2.

    Listens on /ar_object_name for [tool_name, arm_name] requests and on
    /put_back_tool for the arm that should return a grasped tool.  The main
    loop (below the class) drives setup_pr2_init_pose / detect_artag /
    fetch_tool off the boolean state flags set here.
    """
    def __init__(self):
        rospy.init_node("ar_manipulation")
#        rospy.Subscriber("/ar_pose_markers", ARMarkers, self.read_markers_cb)
#        rospy.Subscriber("/ar_object_name", String, self.marker_lookup_cb)
        rospy.Subscriber("/ar_object_name", StringArray, self.marker_lookup_cb)
        rospy.Subscriber("/put_back_tool", String, self.put_back_tool_cb)
        self.pub_rate = rospy.Rate(10)
        self.torso = torso()
        self.head = head()
        self.gripper = gripper()
        self.tf_listener = tf.TransformListener()
        self.cs = pr2cs.ControllerSwitcher()
        # State flags: set by callbacks, consumed by the main loop.
        self.pr2_init = False
        self.search_tag = False
        self.found_tag = False
        self.grasp_object = False
        # Load JTcontroller (Cartesian-space Jacobian-transpose arms)
        self.r_arm_cart = pr2arm.create_pr2_arm('r', pr2arm.PR2ArmJTranspose,
                                controller_name="%s_cart", timeout=0)
        self.l_arm_cart = pr2arm.create_pr2_arm('l', pr2arm.PR2ArmJTranspose,
                                controller_name="%s_cart", timeout=0)
        # Load Joint space controller
        self.r_arm = pr2arm.create_pr2_arm('r', pr2arm.PR2ArmJointTrajectory,
                                controller_name="%s_arm_controller",
                                timeout=0)
        self.l_arm = pr2arm.create_pr2_arm('l', pr2arm.PR2ArmJointTrajectory,
                                controller_name="%s_arm_controller",
                                timeout=0)
        self.epc = eptc.EPC('linear_move')
        self.time_step = 1/20.  # equilibrium-point update period (20 Hz)

    def marker_lookup_cb(self, msg):
        """Handle /ar_object_name; msg.data is [tool_name, arm_name]."""
        self.tool = msg.data[0]
        if self.tool == 'shaver' or self.tool == 'scratcher':
            rospy.logout('Receive request to find tag for '+self.tool)
            self.marker_frame = '/ar_'+self.tool
            self.arm = msg.data[1]
            self.search_tag = True
        else:
            print 'no valid marker found'

    def put_back_tool_cb(self, msg):
        """Handle /put_back_tool; msg.data names the arm holding the tool.

        Lowers the tool back to the pose recorded in fetch_tool (self.tool_ep),
        releases it, and retreats 0.2 m upward.
        """
        duration = 10.
        self.arm = msg.data
        self.switch_arm()
        self.arm_controller.wait_for_ep()
        if self.grasp_object:
            rospy.logout("Putting back the object")
            ep_cur = self.arm_controller.get_ep()
            # ep1 = pickup pose raised 0.2 m (ep is [position, rotation];
            # index [0][2] is the z coordinate).
            ep1 = copy.deepcopy(self.tool_ep)
            ep1[0][2] += .2
            self.epc_move_arm(self.arm_controller, ep_cur, ep1, duration)
            self.epc_move_arm(self.arm_controller, ep1, self.tool_ep, duration)
            self.gripper.Open(self.arm)
            time.sleep(2.)
            self.epc_move_arm(self.arm_controller, self.tool_ep, ep1, duration)
            self.grasp_object = False

    def switch_arm(self):
        """Select self.arm_controller from self.arm and (re)load its Cartesian controller."""
        if self.arm == 'left_arm':
            side = 'l'
            self.arm_controller = self.l_arm_cart
        elif self.arm == 'right_arm':
            side = 'r'
            self.arm_controller = self.r_arm_cart
        # Clear the stale equilibrium point before switching controllers.
        with self.arm_controller.lock:
            self.arm_controller.ep = None
        self.cs.carefree_switch(side, '%s_cart',
#                "$(find hrl_pr2_arms)/params/j_transpose_params_low.yaml")
                "$(find hrl_pr2_arms)/params/j_transpose_params_high.yaml")

    def get_angles(self):
        """Cache the current joint angles of both arms."""
        self.r_angle = self.r_arm.get_joint_angles()
        self.l_angle = self.l_arm.get_joint_angles()

    def epc_move_arm(self, arm, ep1, ep2, duration=10.):
        """Run a min-jerk equilibrium-point trajectory from ep1 to ep2 over `duration` s."""
        self.t_vals = eptc.min_jerk_traj(duration/self.time_step)
        traj = arm.interpolate_ep(ep1, ep2, self.t_vals)
        tc = eptc.EPTrajectoryControl(arm, traj)
        self.epc.epc_motion(tc, self.time_step)

    def setup_pr2_init_pose(self):
        """Move head, torso, and both arms to the hard-coded tag-search posture."""
        rospy.logout('Initializing the Robot..'+self.tool)
        self.head.set_pose([0.15, 0., 0.])
        self.torso.down()
        self.get_angles()
        duration = 5.
        self.t_vals = eptc.min_jerk_traj(duration/self.time_step)
        # Joint-space target postures (7 joints per arm).
        self.r_ep = np.array([-1.397, 0.375, -1.740, -2.122, -1.966, -1.680, -2.491])
        self.l_ep = np.array([1.397, 0.375, 1.740, -2.122, 1.966, -1.680, -3.926])
#        self.r_ep =np.array([-1.397, 0.375, -1.740, -2.122, -1.966, -1.680, .651])
#        self.l_ep =np.array([1.397, 0.375, 1.740, -2.122, 1.966, -1.680, -.784])
        self.cs.carefree_switch('r', '%s_arm_controller')
        self.cs.carefree_switch('l', '%s_arm_controller')
        self.r_arm.wait_for_ep()
        self.l_arm.wait_for_ep()
        self.epc_move_arm(self.r_arm, self.r_angle, self.r_ep, duration)
        self.epc_move_arm(self.l_arm, self.l_angle, self.l_ep, duration)
        self.pr2_init = True

    def detect_artag(self):
        """Look up the tool's AR-tag transform; on success set self.ar_ep and found_tag.

        Returns False on failure, None on success (callers only check the
        found_tag flag, not the return value).
        """
        try:
            rospy.logout("Finding the AR tag..")
            self.pub_rate.sleep()
            (self.ar_pos, rot) = self.tf_listener.lookupTransform("/torso_lift_link",
                        self.marker_frame, rospy.Time(0))
            self.pub_rate.sleep()
            gripper_rot = hrl_tr.rotY(math.pi/2)  #gripper facing -z direction
            self.ar_rot = hrl_tr.quaternion_to_matrix(rot)*gripper_rot
            rospy.logout("Found AR tag!\nPosition: "+pplist(self.ar_pos)+"\nQuaterion: "+pplist(rot))
            # ar_ep = [3x1 position matrix, 3x3 rotation matrix]
            self.ar_ep = []
            self.ar_ep.append(np.matrix(self.ar_pos).T)
            self.ar_ep.append(self.ar_rot)
            self.found_tag = True
        # NOTE(review): bare except swallows *all* errors, not just tf
        # lookup failures — consider narrowing to tf exceptions.
        except:
            rospy.logout('AARtagDetect: Transform failed for '+self.tool)
            return False

    def fetch_tool(self, duration=5.):
        """Approach the detected tag from above, grasp the tool, and lift it."""
        rospy.logout("Moving the "+self.arm+" to fetch the object")
        self.switch_arm()
        self.arm_controller.wait_for_ep()
        ep_cur = self.arm_controller.get_ep()
        # Pre-grasp pose: tag x/y, but hover at current height + 0.1 m.
        ep1 = copy.deepcopy(self.ar_ep)
        ep1[0][2] = ep_cur[0][2]+.1
        self.epc_move_arm(self.arm_controller, ep_cur, ep1, 10)
        self.gripper.Open(self.arm)
        time.sleep(2.)
        self.tool_ep = copy.deepcopy(self.ar_ep)
#        self.tool_ep[1] = np.mat(tf_trans.euler_matrix(0, np.pi/2, 0))[:3,:3]
        # Kinect on Monty has not been calibrated!
        # Offset due to Kinect error (hand-tuned corrections, meters).
        self.tool_ep[0][0] -= .02
#        self.tool_ep[0][1]+= .02
        self.tool_ep[0][2] += .025
#        self.tool_ep[0][2]-= .05
        self.epc_move_arm(self.arm_controller, ep1, self.tool_ep, 15)
        self.gripper.Close(self.arm)
        time.sleep(2.)
        # Lift the grasped tool back to the hover pose and reset the state
        # flags so a new request can be processed.
        self.epc_move_arm(self.arm_controller, self.tool_ep, ep1, 15)
        self.found_tag = False
        self.search_tag = False
        self.pr2_init = False
        self.grasp_object = True
def pplist(vals):
    """Format a sequence of numbers as a space-separated string, 3 decimals each.

    Renamed the parameter from `list`, which shadowed the builtin.
    """
    return ' '.join(['%2.3f' % v for v in vals])
if __name__ == "__main__":
    node = ar_manipulation()
#    arm.get_angles()
    # Poll the callback-driven flags: once a tag search is requested,
    # initialize the robot posture, look for the tag, then grasp.
    while not rospy.is_shutdown():
        if not node.search_tag:
            continue
        if not node.pr2_init:
            node.setup_pr2_init_pose()
        node.detect_artag()
        if node.found_tag:
            node.fetch_tool()
| [
[
1,
0,
0.0067,
0.0033,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.01,
0.0033,
0,
0.66,
0.0417,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0134,
0.0033,
0,
0.66... | [
"import roslib",
"roslib.load_manifest(\"hrl_pr2_arms\")",
"roslib.load_manifest(\"hrl_generic_arms\")",
"roslib.load_manifest(\"hrl_lib\")",
"roslib.load_manifest(\"hrl_msgs\")",
"roslib.load_manifest(\"ar_pose\")",
"import math, time, copy",
"import numpy as np",
"import tf, rospy, actionlib",
"... |
#!/usr/bin/python
import roslib
roslib.load_manifest('tf')
roslib.load_manifest('rospy')
roslib.load_manifest('geometry_msgs')
roslib.load_manifest('hrl_lib')
import rospy, optparse, math, time
import numpy as np
import tf
import tf.transformations as tr
import cPickle as pkl
import hrl_lib.transforms as hrl_tr
import hrl_lib.util as ut
from geometry_msgs.msg import TransformStamped
def log_parse():
    """Return (source_frame, target_frame) parsed from the -s/-t options."""
    parser = optparse.OptionParser('Input the source frame name \
and the target frame name')
    parser.add_option("-s", "--source", action="store", type="string",
                      dest="source_frame", default="l_gripper_tool_frame")
    parser.add_option("-t", "--target", action="store", type="string",
                      dest="target_frame", default="base_link")
    opts, _ = parser.parse_args()
    return opts.source_frame, opts.target_frame
class tf_frame_publisher():
    """Republishes one tf transform as TransformStamped on /frame/<source>."""

    def __init__(self):
        self.source_frame, self.target_frame = log_parse()
        self.pub = rospy.Publisher('/frame/' + self.source_frame,
                                   TransformStamped)
        rospy.init_node('pub_tf_' + self.source_frame, anonymous=True)
        self.tflistener = tf.TransformListener()
        # Scratch state (kept for parity with the original interface).
        self.pos = np.matrix([0., 0., 0.]).T
        self.rot = np.matrix([0., 0., 0.]).T
        self.init_rot = np.matrix([0., 0., 0.]).T
        self.quat = [0., 0., 0., 0.]
        self.tf = TransformStamped()

    def listen_pub(self):
        """Poll tf at ~100 Hz and republish target->source as a message."""
        msg = self.tf
        while not rospy.is_shutdown():
            trans, quat = self.tflistener.lookupTransform(
                self.target_frame, self.source_frame, rospy.Time(0))
            msg.header.frame_id = '/' + self.target_frame
            msg.header.stamp = rospy.Time.now()
            msg.child_frame_id = '/' + self.source_frame
            (msg.transform.translation.x,
             msg.transform.translation.y,
             msg.transform.translation.z) = trans
            (msg.transform.rotation.x,
             msg.transform.rotation.y,
             msg.transform.rotation.z,
             msg.transform.rotation.w) = quat
            self.pub.publish(msg)
            rospy.sleep(1 / 100.)
if __name__ == '__main__':
    node = tf_frame_publisher()
    # Give tf a moment to accumulate transforms before the first lookup.
    rospy.sleep(1)
    try:
        node.listen_pub()
    except rospy.ROSInterruptException:
        pass
| [
[
1,
0,
0.0441,
0.0147,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0588,
0.0147,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0735,
0.0147,
0,
0.... | [
"import roslib",
"roslib.load_manifest('tf')",
"roslib.load_manifest('rospy')",
"roslib.load_manifest('geometry_msgs')",
"roslib.load_manifest('hrl_lib')",
"import rospy, optparse, math, time",
"import numpy as np",
"import tf",
"import tf.transformations as tr",
"import cPickle as pkl",
"import... |
#!/usr/bin/python
import roslib
roslib.load_manifest('tf')
roslib.load_manifest('rospy')
roslib.load_manifest('adl_pr2_log')
roslib.load_manifest('geometry_msgs')
import rospy, optparse, math, time
import numpy as np
import tf
from hrl_lib.msg import WrenchPoseArrayStamped
from geometry_msgs.msg import WrenchStamped
from geometry_msgs.msg import Pose
def log_parse():
    """Return the tool frame name parsed from the -t/--tool option."""
    parser = optparse.OptionParser('Input the source frame name \
and the target frame name')
    parser.add_option("-t", "--tool", action="store", type="string",
                      dest="tool_frame", default="l_gripper_tool_frame")
    opts, _ = parser.parse_args()
    return opts.tool_frame
class posearray_wrench_publisher():
    """Publishes tool/head/torso poses plus the latest F/T wrench in one message.

    Poses are looked up in the base_link frame and packed, in a fixed order
    (tool, head, torso), into a WrenchPoseArrayStamped on
    /adl_wrench_posearray at ~100 Hz.
    """
    def __init__(self):
        self.tool_frame = '/'+log_parse()
        self.head_frame = '/ellipse_frame'
#        self.head_frame = '/'+log_parse()
        self.torso_frame = '/torso_lift_link'
        self.base_frame = '/base_link'
        ft_topic = '/netft_gravity_zeroing/wrench_zeroed'
        rospy.init_node('adl_poses_wrench', anonymous = True)
        self.pub = rospy.Publisher('/adl_wrench_posearray',\
                WrenchPoseArrayStamped)
        self.tflistener = tf.TransformListener()
        self.force_sub = rospy.Subscriber(ft_topic, WrenchStamped, self.force_cb)
        self.msg = WrenchPoseArrayStamped()
        # Reused Pose objects, refilled on every publish cycle.
        self.tool_pose = Pose()
        self.head_pose = Pose()
        self.torso_pose = Pose()

    def force_cb(self, f_msg):
        # Cache the most recent zeroed wrench for the next published message.
        self.msg.wrench = f_msg.wrench

    def pose_wrench_pub(self):
        """Loop at ~100 Hz: look up the three frames and publish them with the wrench."""
        while not rospy.is_shutdown():
            self.tool_p, self.tool_q = self.tflistener.lookupTransform\
                (self.base_frame, self.tool_frame, rospy.Time(0))
            self.head_p, self.head_q = self.tflistener.lookupTransform\
                (self.base_frame, self.head_frame, rospy.Time(0))
            self.torso_p, self.torso_q = self.tflistener.lookupTransform\
                (self.base_frame, self.torso_frame, rospy.Time(0))
            self.msg.header.stamp = rospy.Time.now()
            self.msg.header.frame_id = self.base_frame
            self.msg.poses = []
            # poses[0] is the tool frame
            self.tool_pose.position.x = self.tool_p[0]
            self.tool_pose.position.y = self.tool_p[1]
            self.tool_pose.position.z = self.tool_p[2]
            self.tool_pose.orientation.x = self.tool_q[0]
            self.tool_pose.orientation.y = self.tool_q[1]
            self.tool_pose.orientation.z = self.tool_q[2]
            self.tool_pose.orientation.w = self.tool_q[3]
            self.msg.poses.append(self.tool_pose)
            # poses[1] is the head frame
            self.head_pose.position.x = self.head_p[0]
            self.head_pose.position.y = self.head_p[1]
            self.head_pose.position.z = self.head_p[2]
            self.head_pose.orientation.x = self.head_q[0]
            self.head_pose.orientation.y = self.head_q[1]
            self.head_pose.orientation.z = self.head_q[2]
            self.head_pose.orientation.w = self.head_q[3]
            self.msg.poses.append(self.head_pose)
            # poses[2] is the torso frame (original comment wrongly said "tool")
            self.torso_pose.position.x = self.torso_p[0]
            self.torso_pose.position.y = self.torso_p[1]
            self.torso_pose.position.z = self.torso_p[2]
            self.torso_pose.orientation.x = self.torso_q[0]
            self.torso_pose.orientation.y = self.torso_q[1]
            self.torso_pose.orientation.z = self.torso_q[2]
            self.torso_pose.orientation.w = self.torso_q[3]
            self.msg.poses.append(self.torso_pose)
            self.pub.publish(self.msg)
#            print '\nwrench:\n ', self.msg.wrench
#            print '\ntool_pose:\n ', self.msg.poses[0]
            rospy.sleep(1/100.)
if __name__ == '__main__':
    publisher = posearray_wrench_publisher()
    # Let tf fill its buffer before the first lookup.
    rospy.sleep(1)
    try:
        publisher.pose_wrench_pub()
    except rospy.ROSInterruptException:
        pass
| [
[
1,
0,
0.0288,
0.0096,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0385,
0.0096,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0481,
0.0096,
0,
0.... | [
"import roslib",
"roslib.load_manifest('tf')",
"roslib.load_manifest('rospy')",
"roslib.load_manifest('adl_pr2_log')",
"roslib.load_manifest('geometry_msgs')",
"import rospy, optparse, math, time",
"import numpy as np",
"import tf",
"from hrl_lib.msg import WrenchPoseArrayStamped",
"from geometry_... |
#!/usr/bin/python
import roslib
roslib.load_manifest('rospy')
roslib.load_manifest('std_msgs')
import rospy, optparse, math, time
import numpy as np
import serial
from std_msgs.msg import Bool
class shaver_pub():
def __init__(self):
self.pub = rospy.Publisher('/ros_switch', Bool)
rospy.init_node('shaver_pwr_pub', anonymous = True)
rospy.logout('shaver_pwr_pub node publishing..')
self.state = False
self.pwr_on = 'Power is on'
self.pwr_off = 'Power is off'
def input_state(self):
raw_input("\nPress Enter to Toggle")
self.state = not self.state
self.pub.publish(Bool(self.state))
if self.state:
print self.pwr_on
else:
print self.pwr_off
if __name__ == '__main__':
    toggler = shaver_pub()
    # Keep prompting until ROS shuts down.
    while not rospy.is_shutdown():
        toggler.input_state()
| [
[
1,
0,
0.0857,
0.0286,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1143,
0.0286,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.1429,
0.0286,
0,
0.6... | [
"import roslib",
"roslib.load_manifest('rospy')",
"roslib.load_manifest('std_msgs')",
"import rospy, optparse, math, time",
"import numpy as np",
"import serial",
"from std_msgs.msg import Bool",
"class shaver_pub():\n\tdef __init__(self):\n\t\tself.pub = rospy.Publisher('/ros_switch', Bool)\n\t\trosp... |
#!/usr/bin/python
import roslib
roslib.load_manifest('tf')
roslib.load_manifest('hrl_lib')
import math, time, optparse
import numpy as np
import tf.transformations as tr
import cPickle as pkl
import scipy.stats as st
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import scipy.optimize as so
import hrl_lib.util as ut
import hrl_lib.transforms as hrl_tr
def parse():
parser = optparse.OptionParser('Input the Pose node name and the ft sensor node name')
parser.add_option("-n", "--name", action="store", type="string",\
dest="file_name", default="test")
(options, args) = parser.parse_args()
print 'Opening file name: ',options.file_name
return options.file_name
def compare(file1,file2):
f1=ut.load_pickle(file1+'.pkl')
f2=ut.load_pickle(file2+'.pkl')
force1 = f1['force']
force2 = f2['force']
fmag1 = []
fmag2 = []
for i in range(len(force1)):
fmag1.append(np.linalg.norm(force1[i]))
for i in range(len(force2)):
fmag2.append(np.linalg.norm(force2[i]))
res = st.ttest_ind(fmag1,fmag2)
print res
if __name__ == '__main__':
    # Load the pickled recording named on the command line and open a text
    # log for the summary statistics.
    file_name = parse()
    d = ut.load_pickle(file_name+'.pkl')
    log = open('result_'+file_name+'.log','w')
    force = d['force']
    force_raw = d['force_raw']
    rot = d['rot_data']
    quat = d['quat']
    pos = d['pos']
    # Shift both time axes so t=0 is the recording start.
    ptime = np.array(d['ptime'])-d['init_time']
    ftime = np.array(d['ftime'])-d['init_time']
    init_time = d['init_time']
    # Per-sample series filled by the loops below.
    ptime2 = []
    force_mag = []
    length = []
    accel = []
    accel_mag = []
    v = []
    fx = []
    fy = []
    fz = []
    x = []
    y = []
    z = []
    vx = []
    vy = []
    vz = []
    rotx = []
    roty = []
    rotz = []
    time_diff = []
    vel = np.matrix([0.,0.,0.]).T
    # Pose pass: finite-difference velocities (only for gaps > 20 ms) and
    # per-sample position / rotation components.
    for i in range(len(pos)):
        if i < len(pos)-1:
            if ptime[i+1]-ptime[i] > .02:
                vel=(pos[i+1]-pos[i])/(ptime[i+1]-ptime[i])
                length.append(np.linalg.norm(vel)*(ptime[i+1]-ptime[i]))
                ptime2.append(ptime[i])
                v.append(np.linalg.norm(vel))
                vx.append(vel[0,0])
                vy.append(vel[1,0])
                vz.append(vel[2,0])
        x.append(pos[i][0,0])
        y.append(pos[i][1,0])
        z.append(pos[i][2,0])
        rotx.append(math.degrees(rot[i][0,0]))
        roty.append(math.degrees(rot[i][1,0]))
        rotz.append(math.degrees(rot[i][2,0]))
    # Force pass: magnitude and per-axis components.
    for i in range(len(force)):
        force_mag.append(np.linalg.norm(force[i]))
        fx.append(force[i][0,0])
        fy.append(force[i][1,0])
        fz.append(force[i][2,0])
#    for i in range(len(v)-1):
#        a = (v[i+1]-v[i])/.01
#        accel.append(a)
#        accel_mag.append(np.linalg.norm(a))
    # Summary statistics to stdout.
    print 'time: ', max(ptime)
    print 'max speed: ', max(v)
    print 'min speed: ', min(v)
    path_length = sum(length)
    print 'Path Length: ', path_length
    print 'max vel: ', max(vx),max(vy),max(vz)
    print 'min vel: ', min(vx),min(vy),min(vz)
    print 'ave vel: ', np.mean(vx),np.mean(vx),np.mean(vz)
    print 'max force: ', max(fx), max(fy), max(fz)
    print 'min force: ', min(fx), min(fy), min(fz)
    print 'ave force: ', np.mean(fx),np.mean(fx),np.mean(fz)
    print 'max force_mag: ', max(force_mag)
    print 'min force_mag: ', min(force_mag)
    print 'ave force_mag: ', np.mean(force_mag)
    print 'std_force_mag: ', np.std(force_mag)
    # NOTE(review): assumes a fixed 10 ms force sample period — confirm.
    print 'integration of force (N*s): ',sum(np.array(force_mag)*.01)
    # Same statistics to the result log file.
    print >> log, 'Categories x-axis y-axis z-axis'
    print >> log, 'max_vel', max(vx),max(vy),max(vz)
    print >> log, 'min_vel', min(vx),min(vy),min(vz)
    print >> log, 'ave_vel', np.mean(vx),np.mean(vx),np.mean(vz)
    print >> log, 'max_force', max(fx), max(fy), max(fz)
    print >> log, 'min_force', min(fx), min(fy), min(fz)
    print >> log, 'ave_force', np.mean(fx),np.mean(fx),np.mean(fz)
    print >> log, 'time', max(ptime)
    print >> log, 'path_length', sum(length)
    print >> log, 'max_force_mag', max(force_mag)
    print >> log, 'min_force_mag', min(force_mag)
    print >> log, 'ave_force_mag', np.mean(force_mag)
    print >> log, 'std_force_mag', np.std(force_mag)
    print >> log, 'int_force_mag',sum(np.array(force_mag)*.01)
    # Figure 1: position components vs time.
    fig = plt.figure()
#    ax = Axes3D(fig)
#    ax.scatter(x,y,z,zdir = 'z')
    fig.suptitle(file_name+'_Position')
    ax = fig.add_subplot(3,1,1)
    ax.plot(ptime,x)
    ax.grid(True)
    ax.set_ylabel('x (m)')
    ax = fig.add_subplot(3,1,2)
    ax.plot(ptime,y)
    ax.grid(True)
    ax.set_ylabel('y (m)')
    ax = fig.add_subplot(3,1,3)
    ax.plot(ptime,z)
    ax.grid(True)
    ax.set_ylabel('z (m)')
    # Figure 2: force components vs time.
    fig2 = plt.figure()
    fig2.suptitle(file_name+'_Force')
    ax = fig2.add_subplot(3,1,1)
    ax.plot(ftime,fx)
    ax.grid(True)
    ax.set_ylabel('Fx (N)')
    ax = fig2.add_subplot(3,1,2)
    ax.plot(ftime,fy)
    ax.grid(True)
    ax.set_ylabel('Fy (N)')
    ax = fig2.add_subplot(3,1,3)
    ax.plot(ftime,fz)
    ax.grid(True)
    ax.set_ylabel('Fz (N)')
    # Figure 2b: 3D trajectory.
    fig2b = plt.figure()
    fig2b.suptitle(file_name+' 3D Tra')
    ax = Axes3D(fig2b)
    ax.plot3D(x,y,z)
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.set_zlabel('z')
    # Figure 2c: force magnitude vs time.
    fig2c = plt.figure()
    fig2c.suptitle(file_name+' Force Magnitute')
    ax = fig2c.gca()
    ax.plot(ftime,force_mag)
    ax.set_ylabel('Force (N)')
    ax.set_xlabel('Time (s)')
    # Figure 3: velocity components vs time.
    fig3 = plt.figure()
    fig3.suptitle(file_name+'_Velocity')
    ax = fig3.add_subplot(3,1,1)
    ax.plot(ptime2,vx)
    ax.grid(True)
    ax.set_ylabel('Vx (m/s)')
    ax = fig3.add_subplot(3,1,2)
    ax.plot(ptime2,vy)
    ax.grid(True)
    ax.set_ylabel('Vy (m/s)')
    ax = fig3.add_subplot(3,1,3)
    ax.plot(ptime2,vz)
    ax.grid(True)
    ax.set_ylabel('Vz (m/s)')
    # Figure 3a: speed vs time.
    fig3a = plt.figure()
    fig3a.suptitle(file_name+' Speed')
    ax = fig3a.gca()
    ax.plot(ptime2,v)
    ax.set_ylabel('Speed (m/s)')
    ax.set_xlabel('Time (s)')
    # Figure 4: rotation components (degrees) vs time.
    fig4 = plt.figure()
    fig4.suptitle(file_name+'_rotation')
    ax = fig4.add_subplot(3,1,1)
    ax.plot(ptime,rotx)
    ax.grid(True)
    ax.set_ylabel('angle (deg)')
    ax = fig4.add_subplot(3,1,2)
    ax.plot(ptime,roty)
    ax.grid(True)
    ax.set_ylabel('angle (deg)')
    ax = fig4.add_subplot(3,1,3)
    ax.plot(ptime,rotz)
    ax.grid(True)
    ax.set_ylabel('angle (deg)')
    plt.show()
    log.close()
#    compare('aa_scratch_arm','aa_scratch_face')
| [
[
1,
0,
0.0263,
0.0263,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
1,
0,
0.0526,
0.0263,
0,
0.66,
0.0833,
526,
0,
3,
0,
0,
526,
0,
0
],
[
1,
0,
0.0789,
0.0263,
0,
... | [
"import roslib",
"import math, time, optparse",
"import numpy as np",
"import tf.transformations as tr",
"import cPickle as pkl",
"import scipy.stats as st",
"import matplotlib.pyplot as plt",
"from mpl_toolkits.mplot3d import Axes3D",
"import scipy.optimize as so",
"import hrl_lib.util as ut",
... |
#!/usr/bin/python
import roslib
roslib.load_manifest('stereo_anaglyph')
import rospy
import hrl_camera.ros_camera as rc
import cv
def add_alpha_channel(bgr, alpha_val):
    """Return a 4-channel (BGRA) copy of *bgr* with a constant alpha of alpha_val."""
    w, h = cv.GetSize(bgr)
    bgra = cv.CreateImage((w, h), cv.IPL_DEPTH_8U, 4)
    planes = [cv.CreateImage((w, h), cv.IPL_DEPTH_8U, 1) for _ in range(4)]
    chan1, chan2, chan3, alpha = planes
    # Zero every buffer before filling it.
    for img in planes + [bgra]:
        cv.Set(img, 0)
    cv.Split(bgr, chan1, chan2, chan3, None)
    cv.Set(alpha, alpha_val)
    cv.Merge(chan1, chan2, chan3, alpha, bgra)
    return bgra
def remove_channels(in_bgra, channel_indices):
    """Return a BGRA copy of in_bgra with the channels at channel_indices zeroed."""
    w, h = cv.GetSize(in_bgra)
    planes = [cv.CreateImage((w, h), cv.IPL_DEPTH_8U, 1) for _ in range(4)]
    out = cv.CreateImage((w, h), cv.IPL_DEPTH_8U, 4)
    for img in planes + [out]:
        cv.Set(img, 0)
    cv.Split(in_bgra, planes[0], planes[1], planes[2], planes[3])
    # A None plane makes cv.Merge leave that output channel untouched
    # (it stays zero because `out` was cleared above).
    merge_args = [None if i in channel_indices else p
                  for i, p in enumerate(planes)]
    merge_args.append(out)
    cv.Merge(*tuple(merge_args))
    return out
def anaglyph(left_color, right_color, correction):
    """Compose a red/cyan anaglyph from a stereo pair.

    `correction` shifts the right image horizontally (pixels); the return
    value is a sub-rect of the canvas cropped to the overlapping area.
    """
    w, h = cv.GetSize(left_color)
    # Double-width canvas so the shifted right image always fits.
    bgra = cv.CreateImage((w * 2, h), cv.IPL_DEPTH_8U, 4)
    cv.Set(bgra, 0)
    half_alpha = round(255 / 2.)
    right_bgra = add_alpha_channel(right_color, half_alpha)
    left_bgra = add_alpha_channel(left_color, half_alpha)
    # Left eye keeps only red; right eye drops red (cyan).
    left_red = remove_channels(left_bgra, [0, 1])
    right_cyan = remove_channels(right_bgra, [2])
    if correction < 0:
        left_area = cv.GetSubRect(bgra, (-correction, 0, w, h))
        right_area = cv.GetSubRect(bgra, (0, 0, w, h))
        valid_area = cv.GetSubRect(bgra, (-correction, 0, w + correction, h))
    else:
        left_area = cv.GetSubRect(bgra, (0, 0, w, h))
        right_area = cv.GetSubRect(bgra, (correction, 0, w, h))
        valid_area = cv.GetSubRect(bgra, (correction, 0, w - correction, h))
    cv.Add(left_red, left_area, left_area)
    cv.Add(right_cyan, right_area, right_area)
    return valid_area
if __name__ == '__main__':
    import optparse
    import time
    from sensor_msgs.msg import Image
    from cv_bridge.cv_bridge import CvBridge, CvBridgeError
    p = optparse.OptionParser()
    p.add_option('-c', action='store', default='/wide_stereo', type='string', dest='cam', help='which camera to listen to')
    p.add_option('-d', action='store', default=30, type='int', dest='dist', help='separation distance')
    p.add_option('-s', action='store_true', dest='headless', help='headless mode')
    opt, args = p.parse_args()
    cameras = [opt.cam + '/left/image_rect_color',
               opt.cam + '/right/image_rect_color']
    stereo_listener = rc.ROSStereoListener(cameras)
    # Windowed mode shows the anaglyph; headless mode republishes it as a
    # ROS Image instead.
    if not opt.headless:
        #cv.NamedWindow('left', 0)
        #cv.NamedWindow('right', 0)
        cv.NamedWindow('stereo-anaglyph', 0)
        cv.ResizeWindow('stereo-anaglyph', 640, 480)
        cv.WaitKey(10)
    else:
        bridge = CvBridge()
        image_pub = rospy.Publisher('stereo_anaglyph', Image)
    anaglyph_cyan_image_distance_correction = rospy.get_param('anaglyph_dist', opt.dist)
    # cv.WaitKey key codes for left/right arrows and Esc (plain ASCII codes
    # shown in the trailing comments).
    left = 1113937# 65361
    right = 1113939#65363
    escape = 1048603#27
    while not rospy.is_shutdown():
        l, r = stereo_listener.next()
        red_blue = anaglyph(l, r, anaglyph_cyan_image_distance_correction)
        if not opt.headless:
            #cv.ShowImage('left', l)
            #cv.ShowImage('right', r)
            cv.ShowImage('stereo-anaglyph', red_blue)
            k = cv.WaitKey(10)
            print k
            if k == escape:
                break
            # Arrow keys nudge the horizontal disparity correction live.
            if k == left:
                anaglyph_cyan_image_distance_correction = anaglyph_cyan_image_distance_correction - 1
                print anaglyph_cyan_image_distance_correction
            if k == right:
                anaglyph_cyan_image_distance_correction = anaglyph_cyan_image_distance_correction + 1
                print anaglyph_cyan_image_distance_correction
        else:
            rosimage = bridge.cv_to_imgmsg(red_blue, "bgra8")
            image_pub.publish(rosimage)
#from opencv import cv
#from opencv import highgui
#from time import sleep
#
#def makeMagic(left, right, out):
# chans=[]
# for i in range(6):
# chans.append(cv.cvCreateImage(cv.cvGetSize(left),8,1))
# cv.cvSplit(left, chans[0], chans[1], chans[2], None);
# cv.cvSplit(right, chans[3], chans[4], chans[5], None);
# cv.cvMerge(chans[3],chans[4],chans[2], None, out);
#
# #cv.cvMerge(None,chans[1],None, None, out);
#
#cam=[]
#def main():
# cam.append(highgui.cvCreateCameraCapture(0))
# cam.append(highgui.cvCreateCameraCapture(1))
# highgui.cvNamedWindow ("carrots", highgui.CV_WINDOW_AUTOSIZE)
#
# uno=highgui.cvQueryFrame(cam[0]);
# dos=highgui.cvQueryFrame(cam[1]);
#
# highgui.cvShowImage("carrots",uno);
# highgui.cvWaitKey(0);
# highgui.cvShowImage("carrots",dos);
# highgui.cvWaitKey(0);
#
# merge=cv.cvCreateImage(cv.cvGetSize(uno),8,3)
# makeMagic(uno, dos, merge)
#
# highgui.cvShowImage("carrots",merge);
# highgui.cvWaitKey(0);
#
# while True :
# uno=highgui.cvQueryFrame(cam[0]);
# dos=highgui.cvQueryFrame(cam[1]);
# makeMagic(uno, dos, merge);
# highgui.cvShowImage("carrots",merge);
# if highgui.cvWaitKey(1)=="s":
# cam.append(cam.pop(0))
# print "tick"
#
#if __name__=="__main__":
# main()
| [
[
1,
0,
0.0099,
0.005,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0149,
0.005,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0198,
0.005,
0,
0.66,
... | [
"import roslib",
"roslib.load_manifest('stereo_anaglyph')",
"import rospy",
"import hrl_camera.ros_camera as rc",
"import cv",
"def add_alpha_channel(bgr, alpha_val):\n w, h = cv.GetSize(bgr)\n bgra = cv.CreateImage((w, h), cv.IPL_DEPTH_8U, 4)\n alpha = cv.CreateImage((w, h), cv.IPL_DEPTH_8U, 1)... |
#!/usr/bin/python
import roslib
roslib.load_manifest('stereo_anaglyph')
import rospy
import hrl_camera.ros_camera as rc
import cv
def anaglyph(left_color, right_color):
    """Merge grayscale left/right views into one red-blue anaglyph image."""
    def _gray(src):
        # Single-channel grayscale copy of src.
        plane = cv.CreateImage(cv.GetSize(src), cv.IPL_DEPTH_8U, 1)
        cv.CvtColor(src, plane, cv.CV_RGB2GRAY)
        return plane

    left_mono = _gray(left_color)
    right_mono = _gray(right_color)
    green = cv.CreateImage(cv.GetSize(right_color), cv.IPL_DEPTH_8U, 1)
    result = cv.CreateImage(cv.GetSize(right_color), cv.IPL_DEPTH_8U, 3)
    # Left eye in the first channel, right eye in the third.
    cv.Merge(left_mono, green, right_mono, None, result)
    return result
cameras = ['/wide_stereo/left/image_rect_color',
           '/wide_stereo/right/image_rect_color']
stereo_listener = rc.ROSStereoListener(cameras)
cv.NamedWindow('stereo-anaglyph', cv.CV_WINDOW_AUTOSIZE)
cv.WaitKey(10)
# Display loop: fetch a stereo pair, fuse it, show it until ROS shuts down.
while not rospy.is_shutdown():
    left_img, right_img = stereo_listener.next()
    fused = anaglyph(left_img, right_img)
    cv.ShowImage('stereo-anaglyph', fused)
    cv.WaitKey(10)
#from opencv import cv
#from opencv import highgui
#from time import sleep
#
#def makeMagic(left, right, out):
# chans=[]
# for i in range(6):
# chans.append(cv.cvCreateImage(cv.cvGetSize(left),8,1))
# cv.cvSplit(left, chans[0], chans[1], chans[2], None);
# cv.cvSplit(right, chans[3], chans[4], chans[5], None);
# cv.cvMerge(chans[3],chans[4],chans[2], None, out);
#
# #cv.cvMerge(None,chans[1],None, None, out);
#
#cam=[]
#def main():
# cam.append(highgui.cvCreateCameraCapture(0))
# cam.append(highgui.cvCreateCameraCapture(1))
# highgui.cvNamedWindow ("carrots", highgui.CV_WINDOW_AUTOSIZE)
#
# uno=highgui.cvQueryFrame(cam[0]);
# dos=highgui.cvQueryFrame(cam[1]);
#
# highgui.cvShowImage("carrots",uno);
# highgui.cvWaitKey(0);
# highgui.cvShowImage("carrots",dos);
# highgui.cvWaitKey(0);
#
# merge=cv.cvCreateImage(cv.cvGetSize(uno),8,3)
# makeMagic(uno, dos, merge)
#
# highgui.cvShowImage("carrots",merge);
# highgui.cvWaitKey(0);
#
# while True :
# uno=highgui.cvQueryFrame(cam[0]);
# dos=highgui.cvQueryFrame(cam[1]);
# makeMagic(uno, dos, merge);
# highgui.cvShowImage("carrots",merge);
# if highgui.cvWaitKey(1)=="s":
# cam.append(cam.pop(0))
# print "tick"
#
#if __name__=="__main__":
# main()
| [
[
1,
0,
0.0185,
0.0093,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0278,
0.0093,
0,
0.66,
0.1,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.037,
0.0093,
0,
0.66,
... | [
"import roslib",
"roslib.load_manifest('stereo_anaglyph')",
"import rospy",
"import hrl_camera.ros_camera as rc",
"import cv",
"def anaglyph(left_color, right_color):\n left_mono = cv.CreateImage(cv.GetSize(left_color), cv.IPL_DEPTH_8U, 1)\n right_mono = cv.CreateImage(cv.GetSize(right_color), cv.IP... |
#Particle filter imports
import pfilter as pf
import robot_motion as rm
import object_motion as om
import detection_appearance as da
#Others
import nodes as nd
import transforms2d as t2d
import numpy as np
import math as mt
import opencv.cv as cv
import types
import functools as ft
velocity = np.matrix([0.01, 0.0]).T  # constant per-step (x, y) displacement
pose = t2d.Pose2D(0.0, 0.0, 0.0)
#Setup
mvar = rm.motion_var()
particles = rm.make_set(mvar, pose, 100)  # 100 particles seeded at the origin pose
#Run
motion_model = rm.RobotMotion(mvar)
# Appearance model with isotropic 0.03^2 measurement covariance.
app_model = da.DetectionAppearance(cov=np.matrix([[(.03*.03), 0], [0, (.03*.03)]]))
# NOTE(review): 'filter' shadows the builtin of the same name.
filter = pf.PFilter(motion_model, app_model)
display = nd.RobotDisp("particle filter", size = 2, draw_center=True, meters_radius=10)
# Weight used to normalize particle colors when drawing.
max_weight = app_model.weight(np.matrix([1.0, 0.0]).T, t2d.Pose2D(1.0, 0.0, 0.0))
draw_func = ft.partial(rm.draw_weighted_Pose2D, display, max_weight)
cur_pos = pose.pos.copy()
cur_set = particles
# Simulate 100 steps: the true position drifts by `velocity` each step and
# is fed to the filter as the measurement (see the commented keyword-form
# calls below).
for i in xrange(100):
    display.clear()
    cur_set = filter.step(t2d.Pose2D(velocity[0,0], velocity[1,0], 0),
                          cur_pos, cur_set, draw_func)
    # scur_pos is computed but unused here; the commented code below once
    # drew it as a circle.
    scur_pos = display.to_screen(cur_pos)
    display.draw(wait=10)
    cur_pos = cur_pos + velocity
#print "cur_pos", cur_pos.T
#cur_set = filter.step(control_input=t2d.Pose2D(velocity[0,0], velocity[1,0], 0),
# measurement=cur_pos, particle_set=cur_set)
#cur_pos = cur_pos + velocity
#cur_set = filter.step(control_input=t2d.Pose2D(velocity[0,0], velocity[1,0], 0),
# measurement=cur_pos, particle_set=cur_set)
#cur_pos = cur_pos + velocity
#cur_set = pf.predict(motion_model, t2d.Pose2D(1.0, 0.0, 0.0), cur_set)
#weighted_set = pf.likelihood(app_model, np.matrix([1.0, 0.0]).T, cur_set)
#normalized = pf.normalize_likelihood(weighted_set)
#for i in xrange(100):
# cur_set = filter.step(control_input=t2d.Pose2D(velocity[0,0], velocity[1,0], 0),
# measurement=cur_pos, particle_set=cur_set)
# cur_pos = cur_pos + velocity
#
# display.clear()
# draw_particles(cur_set)
# scur_pos = display.to_screen(cur_pos)
# cv.cvCircle(display.buffer, cv.cvPoint((int) (scur_pos[0,0]), (int) (scur_pos[1,0])),
# 4, cv.cvScalar(100,0,0), cv.CV_FILLED)
# display.draw(wait=10)
#filter.step()
#print "particles"
#for s in particles:
# print s
| [
[
1,
0,
0.0168,
0.0084,
0,
0.66,
0,
180,
0,
1,
0,
0,
180,
0,
0
],
[
1,
0,
0.0252,
0.0084,
0,
0.66,
0.0435,
99,
0,
1,
0,
0,
99,
0,
0
],
[
1,
0,
0.0336,
0.0084,
0,
0.... | [
"import pfilter as pf",
"import robot_motion as rm",
"import object_motion as om",
"import detection_appearance as da",
"import nodes as nd",
"import transforms2d as t2d",
"import numpy as np",
"import math as mt",
"import opencv.cv as cv",
"import types",
"import functools as ft",
"velocity ... |
#!/usr/bin/python
import numpy as np
import random as rd
############################################################################
### Functions implementing a particle filter
############################################################################
# Note:
# To instantiate a particle filter you will need a motion and appearance
# model. Below are signatures and description of the motion and
# appearance models:
# (optional) motion.make_set: (int) -> list state
# motion.predict: (control, state) -> state
# appearance.weight: (measurement, state) -> double
#
# Where what is considered a 'state' must agree between the motion and
# appearance classes.
#
# Optional:
# The particle filter can be sped up by defining additional functions:
# * weight_partial - partial application
# * weight_set - any other optimizations
#
# * predict_partial - partial application
def retTrue(*args):
    """Vacuous predicate: accepts any arguments and always returns True.

    Used as the default resampling gate in PFilter.step.
    """
    return True
def retFalse(*args):
    """Vacuous predicate: accepts any arguments and always returns False."""
    return False
class PFilter:
    """Particle filter driver coupling a motion model with an appearance model."""

    def __init__(self, motion, appearance):
        """Store the motion (prediction) and appearance (weighting) models."""
        self.motion = motion
        self.appearance = appearance

    def step(self, control_input, measurement, particle_set, draw_func=None, set_op=False, should_resample_func=retTrue):
        """Run one predict / weight / (optionally) resample cycle.

        control_input        - control passed to the motion model
        measurement          - observation passed to the appearance model
        particle_set         - current particles (a list, or a 5xN matrix when set_op)
        draw_func            - optional callback handed the weighted set (visualization)
        set_op               - True when particles are stored as a matrix, not a list
        should_resample_func - gate; when it returns False the weighted set is
                               returned unresampled so weights stay with the states
        """
        predicted = predict(self.motion, control_input, particle_set)
        weighted = likelihood(self.appearance, measurement, predicted)
        if draw_func is not None:
            draw_func(weighted)
        if not should_resample_func():
            # Skip resampling but hand back the weights alongside the particles.
            return weighted
        if set_op:
            normalized = set_norm_likelihood(weighted)
            return set_resample_uss(particle_set.shape[1], normalized)
        normalized = normalize_likelihood(weighted)
        return resample_uss(len(particle_set), normalized)
############################################################################
### Helpers
############################################################################
def predict(motion_model, control_input, particle_set):
    """Apply the motion model to every particle, returning the predicted set.

    Dispatches on the model's capabilities, preferring a partially-applied
    predictor, then a whole-set predictor, then the plain per-particle
    predict(control, state) interface.
    """
    if hasattr(motion_model, "predict_partial"):
        step = motion_model.predict_partial(control_input)
        return [step(p) for p in particle_set]
    if hasattr(motion_model, "predict_set"):
        return motion_model.predict_set(control_input, particle_set)
    return [motion_model.predict(control_input, p) for p in particle_set]
def likelihood(appearance_model, measurement, particle_set):
    """Weight each particle against the measurement -> (particle, weight) pairs.

    Prefers the model's vectorized weight_set, then a partially-applied
    weigher, then the plain per-particle weight(measurement, state) call.
    """
    if hasattr(appearance_model, "weight_set"):
        return appearance_model.weight_set(measurement, particle_set)
    if hasattr(appearance_model, "weight_partial"):
        weigh = appearance_model.weight_partial(measurement)
        return [(p, weigh(p)) for p in particle_set]
    return [(p, appearance_model.weight(measurement, p)) for p in particle_set]
def resample_uss(num_samples, particles):
    """
    Universal stochastic sampler (low variance resampling).
    num_samples - number of samples desired
    particles   - list of (state, weight) pairs; weights assumed normalized
    Returns a list of num_samples states drawn proportionally to weight.
    """
    step = 1.0 / float(num_samples)
    offset = rd.random() * step
    cum_weight = particles[0][1]
    idx = 0
    resampled = []
    for m in range(num_samples):
        threshold = offset + m * step
        while threshold > cum_weight:
            idx += 1
            if idx >= len(particles):
                idx = 0  # wrap guards against weights summing below 1 (round-off)
            cum_weight += particles[idx][1]
        resampled.append(particles[idx][0])
    return resampled
def set_resample_uss(num_samples, particles):
    """
    Universal stochastic sampler (low variance resampling), matrix version.
    num_samples - number of columns (samples) to draw
    particles   - 5xN matrix: rows 0-3 hold the state, row 4 the normalized weight
    Returns a 4 x num_samples matrix of resampled states (weight row dropped).
    """
    drawn = np.matrix(np.zeros((4, num_samples)))
    step = 1.0 / float(num_samples)
    offset = rd.random() * step
    cum_weight = particles[4, 0]
    idx = 0
    for m in range(num_samples):
        threshold = offset + m * step
        while threshold > cum_weight:
            idx += 1
            if idx >= particles.shape[1]:
                idx = 0  # wrap guards against weight round-off
            cum_weight += particles[4, idx]
        drawn[:, m] = particles[0:4, idx]
    return drawn
def normalize_likelihood(weighted_particles):
    """Rescale the weights of (particle, weight) pairs so they sum to 1."""
    total = 0.0
    for _, w in weighted_particles:
        total += w
    return [(p, w / total) for (p, w) in weighted_particles]
def set_norm_likelihood(weighted_particles):
    """Normalize row 4 (the weights) of a 5xN particle matrix.

    Mutates the matrix in place and returns it.
    """
    total = np.sum(weighted_particles[4, :])
    weighted_particles[4, :] = weighted_particles[4, :] / total
    return weighted_particles
if __name__ == "__main__":
    # Smoke test: resample 1000 draws from 4 particles whose weights are
    # proportional to 1..4 and print the empirical frequencies (each should
    # approach weight/10).
    particles = [("4", 4), ("1",1), ("2",2), ("3", 3)]
    normalized = normalize_likelihood(particles)
    #print normalized
    num_particles = 1000
    new_particles = resample_uss(num_particles, normalized)
    #print new_particles
    # Count how often each original particle survived resampling.
    counts = {}
    for pair in particles:
        name, numb = pair
        counts[name] = 0
    for p in new_particles:
        counts[p] = 1 + counts[p]
    for k in counts.keys():
        print k, " :", (counts[k] / float(num_particles))
| [
[
1,
0,
0.0112,
0.0056,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0168,
0.0056,
0,
0.66,
0.0909,
715,
0,
1,
0,
0,
715,
0,
0
],
[
2,
0,
0.148,
0.0112,
0,
0... | [
"import numpy as np",
"import random as rd",
"def retTrue( *args ):\n return True",
" return True",
"def retFalse( *args ):\n return False",
" return False",
"class PFilter:\n def __init__(self, motion, appearance):\n \"\"\" Makes a particle filter \"\"\"\n self.motion = mot... |
__all__ = [
'pfilter'
]
| [
[
14,
0,
0.6667,
1,
0,
0.66,
0,
272,
0,
0,
0,
0,
0,
5,
0
]
] | [
"__all__ = [\n'pfilter'\n]"
] |
import numpy as np
import util as ut
import prob as pr
import itertools as it
import types
import opencv.cv as cv
class DetectionAppearance:
    """
    Appearance model for tracking when a detector supplies 2D object poses.

    cov - covariance of the detector's measurements (2x2); larger values
          spread likelihood mass over particles farther from the detection.
    Measurements are 2x1 column matrices.  NOTE(review): the original docs
    called particles Pose2D while weight_set stacks them as matrices via
    ut.list_mat_to_mat - confirm which representation callers use.
    """

    def __init__(self, cov):
        self.cov = cov

    def weight(self, measurement, particle):
        """Gaussian likelihood of one particle given the measurement."""
        density = pr.Gaussian(m=measurement, v=self.cov).pdf_mat()
        return density(particle)[0]

    def weight_partial(self, measurement):
        """Bind the measurement once; return a particle -> weight closure."""
        density = pr.Gaussian(m=measurement, v=self.cov).pdf_mat()
        def weigh(particle):
            return density(particle)[0]
        return weigh

    def weight_set(self, measurement, particle_set):
        """Weight every particle with a single vectorized pdf evaluation."""
        stacked = ut.list_mat_to_mat(particle_set, axis=1)
        weights = pr.Gaussian(m=measurement, v=self.cov).pdf_mat()(stacked)
        return [(particle, weights[idx])
                for idx, particle in enumerate(particle_set)]
def draw_weighted_2D(display, max_weight, particles):
    """Draw particles on the display; weighted particles are tinted by weight.

    display    - drawing surface with .buffer and .to_screen()
    max_weight - weight that maps to the strongest tint
    particles  - either bare positions or (position, weight) tuples
    Uses the legacy OpenCV (opencv.cv) API and Python 2 types.TupleType.
    """
    for p in particles:
        if type(p) is types.TupleType:
            rpos, weight = p
        else:
            rpos = p
        pos = display.to_screen(rpos)
        if type(p) is types.TupleType:
            # Tint: full weight -> magenta-ish, low weight -> near white.
            color = round(255.0 * (weight/max_weight))
            cv.cvCircle(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])), 
                    3, cv.cvScalar(255, 255-color, 255), cv.CV_FILLED, cv.CV_AA)
            cv.cvCircle(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])), 
                    3, cv.cvScalar(200, 200, 200), 1, cv.CV_AA)
        else:
            # Unweighted particles are drawn as plain gray dots.
            cv.cvCircle(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])), 
                    2, cv.cvScalar(150, 150, 150), cv.CV_FILLED, cv.CV_AA)
#def weight_matrix(self, measurement):
# gauss = pr.Gaussian(m = measurement, v = self.cov)
# f = gauss.pdf_mat()
# def weightp(particle):
# w = f(particle.pos)
# return w[0]
# return weightp
| [
[
1,
0,
0.0104,
0.0104,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0208,
0.0104,
0,
0.66,
0.1429,
811,
0,
1,
0,
0,
811,
0,
0
],
[
1,
0,
0.0312,
0.0104,
0,
... | [
"import numpy as np",
"import util as ut",
"import prob as pr",
"import itertools as it",
"import types",
"import opencv.cv as cv",
"class DetectionAppearance:\n def __init__(self, cov):\n \"\"\" \n Appearance model for tracking when there is a detector\n available th... |
import util as ut
import math as mt
import numpy as np
from StringIO import StringIO
import transforms2d as t2d
import opencv.cv as cv
import types
class RobotMotion:
    """
    Samples from the robot motion model; used by the particle filter.

    Construct with either:
        motion_var        - a motion_var instance, or
        rotvar + transvar - the two variance tuples, from which a
                            motion_var is built internally.
    """
    def __init__(self, motion_var=None, rotvar=None, transvar=None):
        if motion_var is not None:
            self.motion_var = motion_var
        elif (rotvar is not None) and (transvar is not None):
            # BUG FIX: the parameter `motion_var` shadows the module-level
            # motion_var class, and in this branch the parameter is None, so
            # the original call `motion_var(rotvar, transvar)` raised
            # TypeError.  Look the class up in the module globals instead.
            self.motion_var = globals()['motion_var'](rotvar, transvar)

    def predict_partial(self, odometry):
        """Return a sampler closure f(state) -> perturbed successor state."""
        return get_odom_sample(self.motion_var, odometry)

    def predict(self, odometry, particle):
        """Sample one perturbed successor of `particle` under `odometry`."""
        f = get_odom_sample(self.motion_var, odometry)
        return f(particle)
def make_set(cov, start_state, num_particles):
    """Create num_particles particles, all seeded at start_state.

    Uses a zero-odometry step so each particle is just a noise-perturbed
    copy of the start state.
    """
    zero_motion = t2d.Pose2D(0.0, 0.0, 0.0)
    return get_odom_samples(cov, start_state, zero_motion, num_particles)
def make_set_gauss(cov, start_state, num_particles):
    """Initialize particles from a Gaussian centered on start_state.

    cov           - 3x3 covariance over (x, y, angle)
    start_state   - t2d.Pose2D (pos assumed to be a 2x1 matrix - consistent
                    with p2d_to_move2d/get_odom_sample in this module)
    num_particles - number of particles to draw
    Returns a list of t2d.Pose2D samples.
    """
    # BUG FIX: `np.concatentate` was a typo (NameError).  The mean must be
    # the flat 3-vector (x, y, angle) that np.random.multivariate_normal
    # expects, and its samples are 1-D, so they take a single subscript
    # (the original indexed them as sample[0,0] etc.).
    mean = np.array([start_state.pos[0, 0],
                     start_state.pos[1, 0],
                     start_state.angle])
    def gen_pose(idx):
        sample = np.random.multivariate_normal(mean, cov)
        return t2d.Pose2D(sample[0], sample[1], sample[2])
    return map(gen_pose, range(num_particles))
####################################################################
# Functions for sampling over 2D robot poses
####################################################################
class move2d:
    """
    Store a 2D movement decomposed into three components:
        rot1  - initial rotation
        trans - translation
        rot2  - final rotation
    (The decomposition is produced by p2d_to_move2d.)
    """
    def __init__(self, rot1, trans, rot2):
        self.rot1 = rot1
        self.trans = trans
        self.rot2 = rot2

    def __str__(self):
        # Python 2 "print >>file" chevron syntax; the trailing comma on the
        # first print suppresses its newline so both prints share one line.
        s = StringIO()
        print >>s, "( rot1:", self.rot1, ", trans:" ,
        print >>s, self.trans, ", rot2:" , self.rot2, ")"
        return s.getvalue()
class motion_var:
    """Variance parameters governing how much noise motion sampling injects."""

    def __init__(self, rotvar=(mt.radians(5), mt.radians(5)), transvar=(0.02, 0.03)):
        """
        rotvar   - tuple of 2 floats:
                       angle variance introduced per 1 rotation,
                       angle variance introduced per 1 meter traveled
        transvar - tuple of 2 floats:
                       distance error introduced per 1 rotation,
                       distance error introduced per 1 meter traveled
        """
        self.rotvar = rotvar
        self.transvar = transvar
def p2d_to_move2d(odom):
    """
    Decompose a differential odometry reading into a move2d:
    initial rotation, translation, final rotation.

    odom - Pose2D difference between this and the previous time step
    """
    stayed_put = (odom.pos == np.matrix([0.0, 0.0]).T).all()
    if stayed_put:
        first_rot = 0.0
    else:
        # Rotate to face the direction of travel before translating.
        first_rot = ut.standard_rad(ut.ang_of_vec(odom.pos) - odom.angle)
    distance = np.linalg.norm(odom.pos)
    second_rot = odom.angle - first_rot
    return move2d(first_rot, distance, second_rot)
def get_odom_samples(cov, s, motion, num_particles):
    """Draw num_particles perturbed poses around state s for one motion step.

    cov    - motion_var variances
    s      - current state (Pose2D)
    motion - odometry step (Pose2D)
    """
    sampler = get_odom_sample(cov, motion)
    return [sampler(s) for _ in range(num_particles)]
def get_odom_sample(motion_variances, motion): #s, motion_variances):
    """
    Get a pose sampler using the odometry motion model
    (Probabilistic Robotics, p. 136).

    motion           - odometry step in p2d format
    motion_variances - motion_var variances
    Returns a closure f(s) that, given a state s in p2d format from time
    t-1, returns a new p2d: a noise-perturbed version of motion applied to s.
    """
    u_move2d = p2d_to_move2d(motion)
    #Partially applied motion
    def get_odom_sample_partial(s):
        # Sample
        # NOTE: the three random draws below must stay in this order so that
        # results are reproducible for a given RNG state.
        srot1 = sample_rot1 (u_move2d, motion_variances)
        trans = sample_trans(u_move2d, motion_variances)
        srot2 = sample_rot2 (u_move2d, motion_variances)
        # Subtract the sampled noise from each decomposed component.
        rot1 = ut.standard_rad(u_move2d.rot1 - srot1)
        trans = u_move2d.trans - trans
        rot2 = u_move2d.rot2 - srot2
        #print mt.degrees(rot1), trans, mt.degrees(rot2)
        # Calculate new values
        sx = s.pos[0,0]
        sy = s.pos[1,0]
        x = sx + trans * mt.cos(s.angle + rot1)
        y = sy + trans * mt.sin(s.angle + rot1)
        total_rot = ut.standard_rad(s.angle + rot1 + rot2)
        return t2d.Pose2D(x,y, total_rot)
    return get_odom_sample_partial
def sample_rot1(odom, odom_cov):
    """Sample noise for the initial rotation of a decomposed move.

    The noise scale grows with the magnitude of the first rotation and with
    the distance traveled; zero motion yields exactly zero noise.
    """
    per_rotation = odom_cov.rotvar[0] / (np.pi * 2.0)
    scale = per_rotation * abs(odom.rot1) + odom_cov.rotvar[1] * abs(odom.trans)
    if scale == 0.0:
        return 0.0
    return np.random.normal(scale=scale)
def sample_trans(odom, odom_cov):
    """Sample noise for the translation component of a decomposed move.

    The noise scale combines a term for total rotation and a term for the
    distance traveled; zero motion yields exactly zero noise.
    """
    scale = (odom_cov.transvar[0] * abs(odom.rot1 + odom.rot2)
             + odom_cov.transvar[1] * abs(odom.trans))
    if scale == 0.0:
        return 0.0
    return np.random.normal(scale=scale)
def sample_rot2(odom, odom_cov):
    """Sample noise for the final rotation of a decomposed move.

    Mirrors sample_rot1 but scales with the second rotation component.
    """
    per_rotation = odom_cov.rotvar[0] / (np.pi * 2.0)
    scale = per_rotation * abs(odom.rot2) + odom_cov.rotvar[1] * abs(odom.trans)
    if scale == 0.0:
        return 0.0
    return np.random.normal(scale=scale)
def draw_weighted_Pose2D(display, max_weight, particles):
    """Draw Pose2D particles with a short heading tick; tint by weight.

    display    - drawing surface with .buffer and .to_screen()
    max_weight - weight that maps to the strongest tint
    particles  - Pose2D objects or (Pose2D, weight) tuples
    Uses the legacy OpenCV (opencv.cv) API and Python 2 types.TupleType.
    """
    for p in particles:
        if type(p) is types.TupleType:
            part, weight = p
            rpos = part.pos
        else:
            part = p
            rpos = p.pos
        # End point of a 7 cm heading indicator in world coordinates.
        x = mt.cos(part.angle) * .07
        y = mt.sin(part.angle) * .07
        dir = rpos.copy()
        dir[0,0] = dir[0,0] + x
        dir[1,0] = dir[1,0] + y
        pos = display.to_screen(rpos)
        dirp = display.to_screen(dir)
        if type(p) is types.TupleType:
            # Tint by relative weight: heavy -> magenta-ish, light -> white.
            color = round(255.0 * (weight/max_weight))
            cv.cvCircle(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])), 
                    2, cv.cvScalar(255, 255-color, 255), cv.CV_FILLED, cv.CV_AA)
            cv.cvCircle(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])), 
                    2, cv.cvScalar(200, 200, 200), 8, cv.CV_AA)
        else:
            # Unweighted particles are drawn as plain gray dots.
            cv.cvCircle(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])), 
                    2, cv.cvScalar(150, 150, 150), cv.CV_FILLED, cv.CV_AA)
        # Heading line from the particle toward its facing direction.
        cv.cvLine(display.buffer, cv.cvPoint((int) (pos[0,0]), (int) (pos[1,0])),
                cv.cvPoint((int) (dirp[0,0]), (int) (dirp[1,0])), 
                cv.cvScalar(100,200,100), 1, cv.CV_AA, 0)
#class odometry2d:
# """ an odometry reading """
# def init(self, rot, trans):
# self.rot = rot
# self.trans = trans
# type params = RobotSampler.cov
# and state = Pose2.t
# and control = Pose2.t
#
# let predict odom_cov u (* s *)=
# let partial = RobotSampler.get_odom_sample u in
# (fun s -> partial s odom_cov)
#
# Keep it simple for now and limit to 2D motion
#type state = Pose2.t
#type error_wts = float*float
#type cov = {rot1w: error_wts;
# transw: error_wts;
# rot2w: error_wts}
# Example covariance for a typical situation
#let image_motion_cov = {rot1w=(0.995,0.005); transw=(0.995,0.005); rot2w=(0.995,0.005)}
#let make_cov rot1 trans rot2 = {rot1w=rot1; transw=trans; rot2w=rot2}
if __name__ == "__main__":
    # Smoke test: build the three pieces a particle filter needs.
    import nodes as nd
    import detection_appearance as da
    rotvar = (0.8, 0.2)
    transvar = (0.1, 0.9)
    # BUG FIX: `transfar` was a typo (NameError) for transvar.
    motion_model = RobotMotion(motion_var(rotvar=rotvar, transvar=transvar))
    cov = np.eye(3)
    # BUG FIX: DetectionAppearance lives in the detection_appearance module
    # (imported as `da`) and its __init__ requires a measurement covariance;
    # the bare no-argument call raised NameError.  Use the 2x2 measurement
    # covariance the class documents (3 cm std dev per axis, as in the demo
    # scripts).
    app_model = da.DetectionAppearance(cov=np.matrix([[(.03*.03), 0], [0, (.03*.03)]]))
    # NOTE(review): nd.RobotDisp is constructed elsewhere with a window title
    # and size arguments - confirm a no-arg construction is supported.
    disp = nd.RobotDisp()
| [
[
1,
0,
0.004,
0.004,
0,
0.66,
0,
811,
0,
1,
0,
0,
811,
0,
0
],
[
1,
0,
0.008,
0.004,
0,
0.66,
0.0526,
526,
0,
1,
0,
0,
526,
0,
0
],
[
1,
0,
0.012,
0.004,
0,
0.66,
... | [
"import util as ut",
"import math as mt",
"import numpy as np",
"from StringIO import StringIO",
"import transforms2d as t2d",
"import opencv.cv as cv",
"import types",
"class RobotMotion:\n \"\"\" \n Generates sample from robot motion model, has methods used in particle filter \n Use... |
#!/usr/bin/python
import roslib
roslib.load_manifest("pr2_laser_follow_behavior")
import rospy
import numpy as np
import math
from actionlib_msgs.msg import *
from pr2_controllers_msgs.msg import *
from geometry_msgs.msg import *
from std_msgs.msg import String
from move_base_msgs.msg import *
import actionlib
import tf
import laser_interface.camera as cam
import hrl_lib.tf_utils as tfu
from threading import RLock
def in_bounds(p2d, xlim, ylim):
    """Return True when the 2x1 image point p2d lies inside the inclusive
    rectangle [xlim[0], xlim[1]] x [ylim[0], ylim[1]]."""
    x_ok = xlim[0] <= p2d[0, 0] <= xlim[1]
    y_ok = ylim[0] <= p2d[1, 0] <= ylim[1]
    return x_ok and y_ok
class LookAtBehavior:
def __init__(self, camera_root_topic):
#self.wait = False
#self.point3d = None
self.state = 'ready'
self.lock = RLock()
self.lock.acquire()
self.message = None
self.STATES = {'ready':'ready', # none
'head_turn': 'head_turn', # something
#'head_turn_drive': 'head_turn_drive', # something
'driving': 'driving'} # something
rospy.init_node('look_at_point_behavior', anonymous=True)
rospy.Subscriber('cursor3d', PointStamped, self.laser_point_handler)
self.point_pub = rospy.Publisher('cursor3dcentered', PointStamped)
self.double_click = rospy.Subscriber('mouse_left_double_click', String, self.move_base_double_click)
self.double_click2 = rospy.Subscriber('mouse_left_double_click', String, self.cancel_move_base_double_click)
self.camera_model = cam.ROSStereoCalibration('/' + camera_root_topic + '/left/camera_info' ,
'/' + camera_root_topic + '/right/camera_info')
self.head_client = actionlib.SimpleActionClient('head_traj_controller/point_head_action', PointHeadAction)
#self.head_client.wait_for_server()
self.base_client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
#self.base_client.wait_for_server()
#self.move_pub = rospy.Publisher('move_base_simple/goal', PoseStamped)
self.move_pub = rospy.Publisher('look_at_point_goal', PoseStamped)
#self.move_pub2 = rospy.Publisher('hai_constant', PoseStamped)
self.tflistener = tf.TransformListener()
self.lock.release()
print 'running'
def move_base_double_click(self, a_str):
if self.message == None:
rospy.logwarn('Unable to go, no message heard.')
return
else:
self.lock.acquire()
self.state = self.STATES['driving']
#Looking at the point last clicked on... (maybe keep doing this as we drive?)
#self.look_at(self.message)
#Move base
self.move_base(self.message)
self.message = None
self.state = self.STATES['ready']
self.lock.release()
def transform_point(self, point_stamped):
point_head = point_stamped.point
#Tranform into base link
target_link = '/base_link'
base_T_head = tfu.transform(target_link, point_stamped.header.frame_id, self.tflistener)
point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
point_mat_base = base_T_head * point_mat_head
t_base, o_base = tfu.matrix_as_tf(point_mat_base)
#Calculate angle robot should face
angle = math.atan2(t_base[1], t_base[0])
q = tf.transformations.quaternion_from_euler(0, 0, angle)
return (t_base, q, target_link)
def move_base(self, point, wait=False):
t_base, q, target_link = point
ps = PoseStamped()
ps.header.frame_id = target_link
ps.pose.position = geometry_msgs.msg.Point(t_base[0], t_base[1], 0)
ps.pose.orientation = geometry_msgs.msg.Quaternion(*q)
self.move_pub.publish(ps)
#Uncomment to actually move
goal = MoveBaseGoal()
goal.target_pose.header.frame_id = target_link
goal.target_pose.pose.position = geometry_msgs.msg.Point(t_base[0], t_base[1], 0)
goal.target_pose.pose.orientation = geometry_msgs.msg.Quaternion(*q)
self.base_client.send_goal(goal)
print 'Sent GOAL'
if wait:
self.base_client.wait_for_result()
if self.base_client.get_state() == GoalStatus.SUCCEEDED:
return True
else:
return False
def laser_point_handler(self, point_stamped):
p = np.matrix([point_stamped.point.x, point_stamped.point.y, point_stamped.point.z, 1.]).T
p2d = self.camera_model.left.P * p
p2d = p2d / p2d[2,0]
bx = ((self.camera_model.left.w/2.) * .9)
by = ((self.camera_model.left.h/2.) * .9)
xlim = [bx, self.camera_model.left.w - bx]
ylim = [by, self.camera_model.left.h - by]
if (self.state == self.STATES['driving']):
return
self.message = self.transform_point(point_stamped)
if not in_bounds(p2d, xlim, ylim):
if self.state != self.STATES['head_turn']:
self.lock.acquire()
self.state = self.STATES['head_turn']
self.lock.release()
#else if we are in bounds, we do nothing
#always update laser's location
def run(self):
r = rospy.Rate(50)
while not rospy.is_shutdown():
r.sleep()
if self.state == self.STATES['head_turn']:
self.lock.acquire()
result = self.look_at(self.message, False)
self.state = self.STATES['ready']
self.lock.release()
def look_at(self, message, wait=True):
g = PointHeadGoal()
g.target.header.frame_id = message[2]
g.target.point = geometry_msgs.msg.Point(*message[0])
g.min_duration = rospy.Duration(1.0)
g.max_velocity = 10.
self.head_client.send_goal(g)
#rospy.loginfo('Sent look at goal ' + str(g))
if wait:
self.head_client.wait_for_result()
if self.head_client.get_state() == GoalStatus.SUCCEEDED:
return True
else:
return False
if __name__ == '__main__':
    # Track the laser pointer using the wide stereo camera pair.
    lab = LookAtBehavior('wide_stereo')
    lab.run()
| [
[
1,
0,
0.012,
0.006,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.018,
0.006,
0,
0.66,
0.0588,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.024,
0.006,
0,
0.66,
... | [
"import roslib",
"roslib.load_manifest(\"pr2_laser_follow_behavior\")",
"import rospy",
"import numpy as np",
"import math",
"from actionlib_msgs.msg import *",
"from pr2_controllers_msgs.msg import *",
"from geometry_msgs.msg import *",
"from std_msgs.msg import String",
"from move_base_msgs.msg ... |
import roslib
roslib.load_manifest('pr2_laser_follow_behavior')
import math
import rospy
from geometry_msgs.msg import PointStamped
from geometry_msgs.msg import PoseStamped
import hrl_lib.tf_utils as tfu
import tf
class FollowPointBehavior:
    """Republishes a centered laser-pointer point as a move_base_simple goal
    positioned at the point and oriented to face it."""

    def __init__(self):
        # BUG FIX: the callback must be referenced through self; the bare
        # name `follow_point_cb` raised NameError at construction time.
        rospy.Subscriber('cursor3dcentered', PointStamped, self.follow_point_cb)
        self.move_pub = rospy.Publisher('move_base_simple/goal', PoseStamped)
        self.tflistener = tf.TransformListener()

    def follow_point_cb(self, point_stamped):
        """Transform the laser point into /base_link and publish a goal pose
        at that point, yawed to face it from the origin."""
        point_head = point_stamped.point
        base_T_head = tfu.transform('/base_link', point_stamped.header.frame_id, self.tflistener)
        # BUG FIX: `point` was undefined (NameError); the coordinates come
        # from point_head.
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, o_base = tfu.matrix_as_tf(point_mat_base)
        x = t_base[0]
        y = t_base[1]
        angle = math.atan2(y, x)
        ps = PoseStamped()
        ps.header.frame_id = '/base_link'
        ps.pose.position.x = x
        ps.pose.position.y = y
        ps.pose.position.z = 0.
        # BUG FIX: quaternion_from_euler takes (roll, pitch, yaw); the
        # heading is a yaw, matching the sibling LookAtBehavior code, but the
        # original passed it in the roll slot.
        q = tf.transformations.quaternion_from_euler(0, 0, angle)
        ps.pose.orientation.x = q[0]
        ps.pose.orientation.y = q[1]
        ps.pose.orientation.z = q[2]
        ps.pose.orientation.w = q[3]
        self.move_pub.publish(ps)

    def run(self):
        """Spin at 10 Hz until shutdown; all work happens in the callback."""
        r = rospy.Rate(10)
        while not rospy.is_shutdown():
            r.sleep()
if __name__ == '__main__':
    rospy.init_node('follow_point_node', anonymous=True)
    # NOTE(review): the node is initialized but FollowPointBehavior is never
    # instantiated or run here - confirm whether this script is complete.
    #Subscribes to laser point
    #sends point out to move_base_simple/goal
#header:
# seq: 0
# stamp:
# secs: 1278624411
# nsecs: 326550373
# frame_id: /map
#pose:
# position:
# x: 1.49216187
# y: -0.0629254132509
# z: 0.0
# orientation:
# x: 0.0
# y: 0.0
# z: 0.127143523912
# w: 0.991884330115
| [
[
1,
0,
0.0152,
0.0152,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0303,
0.0152,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0455,
0.0152,
0,
0.6... | [
"import roslib",
"roslib.load_manifest('pr2_laser_follow_behavior')",
"import rospy",
"from geometry_msgs.msg import PointStamped",
"from geometry_msgs.msg import PoseStamped",
"import hrl_lib.tf_utils as tfu",
"import tf",
"class FollowPointBehavior:\n\n def __init__(self):\n rospy.Subscrib... |
#! /usr/bin/python
import roslib
roslib.load_manifest('pr2_follow_laser_behavior')
import rospy
import actionlib
from actionlib_msgs.msg import *
from pr2_controllers_msgs.msg import *
from geometry_msgs.msg import *
# Point the PR2 head at a spot one meter forward and one meter up in the
# base frame, then report whether the point_head action succeeded.
rospy.init_node('move_the_head', anonymous=True)
client = actionlib.SimpleActionClient(
    '/head_traj_controller/point_head_action', PointHeadAction)
client.wait_for_server()
g = PointHeadGoal()
g.target.header.frame_id = 'base_link'
#g.target.header.frame_id = 'wide_stereo_optical_frame'
g.target.point.x = 1.
g.target.point.y = 0
g.target.point.z = 1.
g.min_duration = rospy.Duration(1.0)
client.send_goal(g)
client.wait_for_result()
if client.get_state() == GoalStatus.SUCCEEDED:
    print "Succeeded"
else:
    print "Failed"
| [
[
1,
0,
0.0625,
0.0312,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0938,
0.0312,
0,
0.66,
0.0556,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1562,
0.0312,
0,
0.... | [
"import roslib",
"roslib.load_manifest('pr2_follow_laser_behavior')",
"import rospy",
"import actionlib",
"from actionlib_msgs.msg import *",
"from pr2_controllers_msgs.msg import *",
"from geometry_msgs.msg import *",
"rospy.init_node('move_the_head', anonymous=True)",
"client = actionlib.SimpleAct... |
#!/usr/bin/python
import roslib
roslib.load_manifest("pr2_laser_follow_behavior")
import rospy
import numpy as np
import math
from actionlib_msgs.msg import *
from pr2_controllers_msgs.msg import *
from geometry_msgs.msg import *
from std_msgs.msg import String
from move_base_msgs.msg import *
import actionlib
import tf
import laser_interface.camera as cam
import hrl_lib.tf_utils as tfu
from threading import RLock
def in_bounds(p2d, xlim, ylim):
    """True iff the 2x1 point p2d lies within the inclusive x/y limits."""
    x, y = p2d[0, 0], p2d[1, 0]
    return (xlim[0] <= x <= xlim[1]) and (ylim[0] <= y <= ylim[1])
class LookAtBehavior:
    """Laser-pointer interface state machine (revised version).

    'turn' - keep the head pointed at the newest laser dot, but only re-aim
             when the dot projects outside the central region of the image.
    'move' - after a double click, drive the base to the last dot; a second
             double click while driving cancels the goal.
    """
    def __init__(self, camera_root_topic):
        self.state = 'turn'
        self.move_state = None
        self.laser_point_base = None  # last point as (translation, quaternion, frame)
        self.message = None           # raw PointStamped behind laser_point_base
        # NOTE(review): double_click doubles as a flag; below it briefly holds
        # the Subscriber object until run() resets it to None - confirm the
        # subscription survives losing this reference.
        self.double_click = None
        self.seq = 0                  # seq of the last laser message handled
        rospy.init_node('look_at_point_behavior', anonymous=True)
        rospy.Subscriber('cursor3d', PointStamped, self.laser_point_handler)
        self.double_click = rospy.Subscriber('mouse_left_double_click', String, self.double_click_cb)
        self.camera_model = cam.ROSStereoCalibration('/' + camera_root_topic + '/left/camera_info' ,
                '/' + camera_root_topic + '/right/camera_info')
        self.head_client = actionlib.SimpleActionClient('head_traj_controller/point_head_action', PointHeadAction)
        self.base_client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
        self.move_pub = rospy.Publisher('look_at_point_goal', PoseStamped)
        self.tflistener = tf.TransformListener()
        rospy.loginfo( 'Running')

    def double_click_cb(self, a_str):
        # Only raises a flag; the state machine in run() reacts to it.
        rospy.loginfo('Double CLICKED')
        self.double_click = True

    def laser_point_handler(self, point_stamped):
        # Track the laser only while turning; ignore updates while driving.
        if self.state == 'turn':
            self.laser_point_base = self.transform_point(point_stamped)
            self.message = point_stamped

    def transform_point(self, point_stamped):
        """Express the laser point in /base_link plus a yaw facing it.

        Returns (translation, quaternion, target frame id).
        """
        point_head = point_stamped.point
        #Tranform into base link
        target_link = '/base_link'
        base_T_head = tfu.transform(target_link, point_stamped.header.frame_id, self.tflistener)
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, o_base = tfu.matrix_as_tf(point_mat_base)
        #Calculate angle robot should face
        angle = math.atan2(t_base[1], t_base[0])
        q_base = tf.transformations.quaternion_from_euler(0, 0, angle)
        return (t_base, q_base, target_link)

    def move_base(self, point, wait=True):
        """Publish and send a move_base goal for (translation, quaternion, frame).

        When wait is True, blocks and returns whether the goal succeeded.
        """
        t_base, q, target_link = point
        ps = PoseStamped()
        ps.header.frame_id = target_link
        ps.pose.position = geometry_msgs.msg.Point(t_base[0], t_base[1], 0)
        ps.pose.orientation = geometry_msgs.msg.Quaternion(*q)
        self.move_pub.publish(ps)
        #Uncomment to actually move
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = target_link
        goal.target_pose.pose.position = geometry_msgs.msg.Point(t_base[0], t_base[1], 0)
        goal.target_pose.pose.orientation = geometry_msgs.msg.Quaternion(*q)
        self.base_client.send_goal(goal)
        if wait:
            self.base_client.wait_for_result()
        if self.base_client.get_state() == GoalStatus.SUCCEEDED:
            return True
        else:
            return False

    def look_at(self, message, wait=True):
        """Point the head at (translation, quaternion, frame) `message`.

        When wait is True, blocks up to 1 s and returns whether it succeeded.
        """
        g = PointHeadGoal()
        g.target.header.frame_id = message[2]
        g.target.point = geometry_msgs.msg.Point(*message[0])
        g.min_duration = rospy.Duration(1.0)
        g.max_velocity = 10.
        self.head_client.send_goal(g)
        if wait:
            self.head_client.wait_for_result(rospy.Duration(1.))
        if self.head_client.get_state() == GoalStatus.SUCCEEDED:
            return True
        else:
            return False

    def run(self):
        """Drive the 'turn' / 'move' state machine at 100 Hz until shutdown."""
        r = rospy.Rate(100)
        timeout_time = None  # NOTE(review): unused - leftover from a timeout idea
        self.double_click = None
        while not rospy.is_shutdown():
            if self.state == 'turn':
                if self.laser_point_base is not None:
                    if self.double_click is None:
                        # Only react once per laser message (dedupe by seq).
                        if self.message.header.seq != self.seq:
                            self.seq = self.message.header.seq
                            point_stamped = self.message
                            # Project the 3D laser point into the left image.
                            p = np.matrix([point_stamped.point.x, point_stamped.point.y, point_stamped.point.z, 1.]).T
                            p2d = self.camera_model.left.P * p
                            p2d = p2d / p2d[2,0]
                            # Central 90% of the image counts as "in bounds".
                            bx = ((self.camera_model.left.w/2.) * .9)
                            by = ((self.camera_model.left.h/2.) * .9)
                            xlim = [bx, self.camera_model.left.w - bx]
                            ylim = [by, self.camera_model.left.h - by]
                            if not in_bounds(p2d, xlim, ylim):
                                rospy.loginfo('\'turn\': Looking at laser point msg #: ' + str(self.message.header.seq))
                                self.look_at(self.laser_point_base, True)
                    else:
                        # Double click: commit to driving toward the point.
                        rospy.loginfo('\'turn\': double clicked. Transitioning to \'move\'.')
                        self.state = 'move'
                        self.move_state = 'send_cmd'
                        self.double_click = None
            elif self.state == 'move':
                if self.move_state == 'send_cmd':
                    if self.laser_point_base is not None:
                        rospy.loginfo('\'move\': Sending move command.')
                        # Non-blocking send; completion is polled below.
                        self.move_base(self.laser_point_base, False)
                        self.move_state = 'check_status'
                        self.laser_point_base = None
                        self.message = None
                    else:
                        raise RuntimeError('laser_point_base is none!')
                elif self.move_state == 'check_status':
                    if self.double_click is not None:
                        # Second double click cancels the drive.
                        rospy.loginfo('\'move\': Canceling goal. Transitioning back to \'turn\'.')
                        self.base_client.cancel_goal()
                        self.state = 'turn'
                        self.move_state = None
                        self.double_click = None
                    else:
                        if self.base_client.get_state() == GoalStatus.SUCCEEDED or \
                            self.base_client.simple_state == actionlib.SimpleGoalState.DONE:
                            rospy.loginfo('\'move\': Reached goal. Transitioning to \'turn\'.')
                            self.state = 'turn'
                            self.move_state = None
                            self.double_click = None
                        #only if we exceed our wait oime
                        #else:
                        #    return False???
                else:
                    raise RuntimeError('invalid state for self.move_state')
            else:
                raise RuntimeError('invalid state for self.state')
            r.sleep()
if __name__ == '__main__':
    # Track the laser pointer using the wide stereo camera pair.
    lab = LookAtBehavior('wide_stereo')
    lab.run()
| [
[
1,
0,
0.0113,
0.0056,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0169,
0.0056,
0,
0.66,
0.0588,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0226,
0.0056,
0,
0.... | [
"import roslib",
"roslib.load_manifest(\"pr2_laser_follow_behavior\")",
"import rospy",
"import numpy as np",
"import math",
"from actionlib_msgs.msg import *",
"from pr2_controllers_msgs.msg import *",
"from geometry_msgs.msg import *",
"from std_msgs.msg import String",
"from move_base_msgs.msg ... |
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import rospy
import cv
import sys
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import tf
import hrl_camera.ros_camera as cam
from sensor_msgs.msg import CameraInfo
import numpy as np
import hai_sandbox.features as fea
##
# from camera.py in laser_interface.
class ROSCameraCalibration:
    """Caches a camera's calibration from a ROS CameraInfo topic and
    projects 3D points into that camera's image plane."""
    def __init__(self, channel):
        # channel - CameraInfo topic name to subscribe to
        rospy.Subscriber(channel, CameraInfo, self.camera_info)
        self.has_msg = False  # True once the first CameraInfo has arrived

    def camera_info(self, msg):
        """CameraInfo callback: cache distortion, intrinsics (K),
        rectification (R), projection (P), image size, and frame id."""
        self.distortion = np.matrix(msg.D)
        self.K = np.reshape(np.matrix(msg.K), (3,3))
        self.R = np.reshape(np.matrix(msg.R), (3,3))
        self.P = np.reshape(np.matrix(msg.P), (3,4))
        self.w = msg.width
        self.h = msg.height
        self.frame = msg.header.frame_id
        self.has_msg = True

    ##
    # project 3D point into this camera 
    #   
    # @param p 3x1 matrix in given coord frame
    # @param tf_listener None if transformation not needed
    # @param from_frame None is default camera frame
    # @return 2x1 matrix
    def project(self, p, tf_listener=None, from_frame=None):
        # NOTE(review): assumes camera_info has arrived (self.P set) before
        # the first call - confirm callers wait on has_msg.
        if not(from_frame == None or from_frame == self.frame):
            # Re-express p in this camera's frame before projecting.
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T
        # Homogenize, apply the 3x4 projection, then dehomogenize.
        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,0]
        return pp[0:2,0]
class GripperTipProjected:
    """Streams the left forearm camera, draws SURF features on each frame, and
    overlays circles at the projected positions of the left gripper's two
    finger-tip links."""
    def __init__(self):
        # Image topic used by this viewer.  (Unused wide-stereo topic name
        # locals from an earlier revision were removed; for reference they
        # were /wide_stereo/{left,right}/image_rect_color and
        # /wide_stereo/{left,right}/camera_info.)
        forearm_cam_l = '/l_forearm_cam/image_rect_color'

        # TF frames of the four gripper finger tips; indices 2 and 3 are the
        # left gripper's left/right tips, which run() projects.
        self.finger_tips = ['r_gripper_l_finger_tip_link',
                            'r_gripper_r_finger_tip_link',
                            'l_gripper_l_finger_tip_link',
                            'l_gripper_r_finger_tip_link']

        # Optical frames of the cameras this tool can work with.
        self.camera_fr = ['r_forearm_cam_optical_frame',
                          'l_forearm_cam_optical_frame',
                          'wide_stereo_optical_frame']

        rospy.init_node('gripper_pose_viewer')
        #self.camera_geo = ROSCameraCalibration('/wide_stereo/left/camera_info')
        self.camera_geo = ROSCameraCalibration('/l_forearm_cam/camera_info')
        self.camera = cam.ROSImageClient(forearm_cam_l)
        self.tflistener = tf.TransformListener()

    def run(self):
        """Display loop: grab frames, draw SURF keypoints, and circle the
        projected left-gripper finger tips until ROS shuts down."""
        cv.NamedWindow('surf', 1)
        while not rospy.is_shutdown():
            image = self.camera.get_frame()
            image_gray = fea.grayscale(image)
            surf_keypoints, surf_descriptors = fea.surf(image_gray)
            vis_img = fea.draw_surf(image, surf_keypoints, (255, 0, 0))
            #Project the tip of the gripper (both of them) into the image frame
            img_ll = self.camera_geo.project(np.matrix([0,0,0.]).T, self.tflistener, self.finger_tips[2])
            img_lr = self.camera_geo.project(np.matrix([0,0,0.]).T, self.tflistener, self.finger_tips[3])
            # Round projected pixel coordinates to ints for drawing.
            cv.Circle(vis_img, tuple(np.matrix(np.round(img_ll), dtype='int').A1.tolist()), 30, (0, 255, 0), 1, cv.CV_AA)
            cv.Circle(vis_img, tuple(np.matrix(np.round(img_lr), dtype='int').A1.tolist()), 30, (0, 255, 0), 1, cv.CV_AA)
            cv.ShowImage('surf', vis_img)
            cv.WaitKey(10)
if __name__ == '__main__':
    # Launch the live gripper-tip projection viewer.
    g = GripperTipProjected()
    g.run()
    # Leftover scratch code from earlier experiments:
    #fname = sys.argv[1]
    #bridge = CvBridge()
    #ws_leftinfo = ROSCameraCalibration(ws_linf)
    #ws_rightinfo = ROSCameraCalibration(ws_rinf)
| [
[
1,
0,
0.0093,
0.0093,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0093,
0.0093,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0185,
0.0093,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"from cv_bridge.cv_bridge import CvBridge, CvBridgeError",
"import rospy",
"import cv",
"import sys",
"import hrl_lib.tf_utils as tfu",
"import tf.transformations as tr",
"import tf",
"impor... |
#! /usr/bin/python
import roslib; roslib.load_manifest('hai_sandbox')
import hrl_camera.ros_camera as rc
import hrl_lib.rutils as ru
import hrl_lib.util as ut
import rospy
import math
import cv
#TF
import tf
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
if __name__ == '__main__':
    import sys
    base_name = sys.argv[1]
    test = 'laser'
    # Construct the scanner/camera clients once.  (A previous revision had a
    # second, redundant __main__ block above this one that built a throwaway
    # LaserScanner and Prosilica before these; that duplicate init is removed.)
    ls = ru.LaserScanner('point_cloud_srv')
    prosilica = rc.Prosilica('prosilica', 'streaming')
    tf_listener = tf.TransformListener()

    rospy.loginfo( 'Getting laser scan.')
    points = ls.scan(math.radians(180.), math.radians(-180.), 20.)
    rospy.loginfo('Size of point cloud: %d' % len(points.points))

    rospy.loginfo( 'Grabbing image.')
    image = prosilica.get_frame()

    rospy.loginfo( 'Grabbing transforms.')
    #transform from tilt_laser => base_footprint (pointcloud is already in base_footprint)
    #transform from base_footprint => (pose of head) prosilica
    pro_T_bf = tfu.transform('/high_def_optical_frame', '/base_footprint', tf_listener)
    #transform from base_footprint => map
    map_T_bf = tfu.transform('/map', '/base_footprint', tf_listener)

    #get camera's P matrix
    rospy.loginfo('Waiting for camera_info.')
    calibration = rc.ROSCameraCalibration('/prosilica/camera_info')
    r = rospy.Rate(10)
    # Spin until the first CameraInfo message has populated the calibration.
    while not rospy.is_shutdown() and calibration.has_msg == False:
        r.sleep()

    # Persist everything under base_name: pickle for data, png for the image.
    rospy.loginfo('Saving.')
    pkl_name = '%s.pkl' % base_name
    img_name = '%s.png' % base_name
    ut.save_pickle({'points': points,
                    'pro_T_bf': pro_T_bf,
                    'map_T_bf': map_T_bf,
                    'camera_info': calibration}, pkl_name)
    cv.SaveImage(img_name, image)
    rospy.loginfo( 'Saved to %s and %s.' % (pkl_name, img_name))
| [
[
1,
0,
0.0333,
0.0167,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0333,
0.0167,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.05,
0.0167,
0,
0.66... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import hrl_camera.ros_camera as rc",
"import hrl_lib.rutils as ru",
"import hrl_lib.util as ut",
"import rospy",
"import math",
"import cv",
"import tf",
"import hrl_lib.tf_utils as tfu",
... |
import csv
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import rospy
import cv
import sys
import hrl_lib.rutils as ru
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import tf
import hrl_camera.ros_camera as cam
from sensor_msgs.msg import CameraInfo
import numpy as np
import hai_sandbox.features as fea
import os.path as pt
import hrl_lib.util as ut
import scipy.cluster.vq as vq
import os
def csv_bag_names(fname):
    """Yield each row of the CSV file fname (csv.reader yields a list of
    fields per row, so a one-column file yields one-element lists).

    Uses a with-block so the file is closed even if the generator is
    abandoned before exhaustion; the old trailing close() only ran when the
    loop completed.
    """
    with open(fname) as csv_file:
        for bag_name in csv.reader(csv_file):
            yield bag_name
if __name__ == '__main__':
    import sys
    # CSV file whose first column lists bag file paths, one per row.
    fname = sys.argv[1]
    for path in csv_bag_names(fname):
        # Run the per-bag processing script on each listed bag.
        # NOTE(review): path[0] is interpolated into a shell command without
        # quoting/escaping -- a path containing spaces or shell
        # metacharacters will break or execute; confirm inputs are trusted.
        cmd ='python test08.py %s' % path[0]
        print cmd
        os.system(cmd)
| [
[
1,
0,
0.0294,
0.0294,
0,
0.66,
0,
312,
0,
1,
0,
0,
312,
0,
0
],
[
1,
0,
0.0588,
0.0294,
0,
0.66,
0.05,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0588,
0.0294,
0,
0.... | [
"import csv",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"from cv_bridge.cv_bridge import CvBridge, CvBridgeError",
"import rospy",
"import cv",
"import sys",
"import hrl_lib.rutils as ru",
"import hrl_lib.tf_utils as tfu",
"import t... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import tf.transformations as tr
import hrl_lib.tf_utils as tfu
import hrl_lib.util as ut
import tf
import geometry_msgs.msg as gm
import trigger_msgs.msg as trm
import sys
import math
import numpy as np
class MoveBase:
    """Open-loop-ish base driver: publishes Twist commands on
    base_controller/command while monitoring odometry (odom_combined) via TF
    to decide when the requested rotation/translation has been covered."""
    def __init__(self):
        rospy.init_node('drive')
        # Velocity command publisher for the base controller.
        self.tw_pub = rospy.Publisher('base_controller/command', gm.Twist)
        self.tl = tf.TransformListener()

    def go_ang(self, ang, speed):
        """Rotate the base by ang degrees at |speed| deg/s, using odometry
        yaw accumulated via standard_rad to decide when to stop."""
        dt = math.radians(ang)
        # NOTE(review): positive requested angle commands a NEGATIVE angular
        # velocity (sign = -1) while the loop waits for the accumulated yaw
        # to EXCEED +dt -- presumably the odometry yaw convention is inverted
        # relative to the command frame; confirm on hardware.
        if dt > 0:
            sign = -1
        elif dt < 0:
            sign = 1
        else:
            sign = 0
        self.tl.waitForTransform('base_footprint', 'odom_combined', rospy.Time(), rospy.Duration(10))
        p0_base = tfu.transform('base_footprint', 'odom_combined', self.tl)# \
        # Starting yaw from the odometry rotation matrix.
        start_ang = tr.euler_from_matrix(p0_base[0:3, 0:3], 'sxyz')[2]
        r = rospy.Rate(100)
        dist_so_far = 0.
        last_ang = start_ang
        while not rospy.is_shutdown():
            pcurrent_base = tfu.transform('base_footprint', 'odom_combined', self.tl) #\
            current_ang = tr.euler_from_matrix(pcurrent_base[0:3, 0:3], 'sxyz')[2]
            # Accumulate the wrapped per-step yaw delta so multi-turn
            # rotations are tracked correctly.
            dist_so_far = dist_so_far + (ut.standard_rad(current_ang - last_ang))
            if dt > 0 and dist_so_far > dt:
                rospy.loginfo('stopped! %f %f' % (dist_so_far, dt))
                break
            elif dt < 0 and dist_so_far < dt:
                rospy.loginfo('stopped! %f %f' % (dist_so_far, dt))
                break
            elif dt == 0:
                rospy.loginfo('stopped! %f %f' % (dist_so_far, dt))
                break

            tw = gm.Twist()
            tw.angular.z = math.radians(speed * sign)
            self.tw_pub.publish(tw)
            r.sleep()
            last_ang = current_ang

    def go_x(self, x, speed):
        """Translate the base x meters along its x axis at |speed| m/s,
        stopping once odometry reports the distance has been covered."""
        print 'go x called!'
        # Drive forward or backward depending on the sign of x.
        vel = speed * np.sign(x)
        self.tl.waitForTransform('base_footprint', 'odom_combined', rospy.Time(), rospy.Duration(10))
        p0_base = tfu.transform('base_footprint', 'odom_combined', self.tl)
        r = rospy.Rate(100)
        while not rospy.is_shutdown():
            pcurrent_base = tfu.transform('base_footprint', 'odom_combined', self.tl)
            # Displacement since start = translation part of p0^-1 * pcurrent.
            relative_trans = np.linalg.inv(p0_base) * pcurrent_base
            dist_moved = np.linalg.norm(relative_trans[0:3,3])
            print "%s" % str(dist_moved)
            if dist_moved > np.abs(x):
                rospy.loginfo('stopped! error %f' % (np.abs(dist_moved-np.abs(x))))
                break

            tw = gm.Twist()
            tw.linear.x = vel
            tw.linear.y = 0
            tw.linear.z = 0
            tw.angular.x = 0
            tw.angular.y = 0
            tw.angular.z = 0

            self.tw_pub.publish(tw)
            r.sleep()
if __name__ == '__main__':
    m = MoveBase()
    # Drive forward 0.2 m at 0.05 m/s; the rotation test is disabled.
    #m.go_ang(-390, 100)
    m.go_x(.2, .05)
| [
[
1,
0,
0.0112,
0.0112,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0112,
0.0112,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0225,
0.0112,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import tf.transformations as tr",
"import hrl_lib.tf_utils as tfu",
"import hrl_lib.util as ut",
"import tf",
"import geometry_msgs.msg as gm",
"import trigger_msgs.msg as trm... |
import csv
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import rospy
import cv
import sys
import hrl_lib.rutils as ru
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import tf
import hrl_camera.ros_camera as cam
from sensor_msgs.msg import CameraInfo
import numpy as np
import hai_sandbox.features as fea
import os.path as pt
import hrl_lib.util as ut
import scipy.cluster.vq as vq
def csv_bag_names(fname):
    """Yield each row of the CSV file fname (csv.reader yields a list of
    fields per row, so a one-column file yields one-element lists).

    Uses a with-block so the file is closed even if the generator is
    abandoned before exhaustion; the old trailing close() only ran when the
    loop completed.
    """
    with open(fname) as csv_file:
        for bag_name in csv.reader(csv_file):
            yield bag_name
def gen_pkl_name(path_complete, ext):
    """Build a sibling filename for a bag: swap the bag's extension for ext
    while keeping its directory (e.g. /a/b/run.bag + '.surf_pkl' ->
    /a/b/run.surf_pkl)."""
    directory, bag_file = pt.split(path_complete)
    stem = pt.splitext(bag_file)[0]
    return pt.join(directory, stem + ext)
def features_mat(features_list):
    """Stack the SURF descriptors of every (time, surf) pair in features_list
    into a single matrix with one descriptor per column."""
    descriptor_cols = [np.matrix(surf[1]).T for _, surf in features_list]
    return np.column_stack(tuple(descriptor_cols))
def features_mat_compress(fmat, k):
    """Compress the columns of fmat to k k-means cluster centers, seeding
    k-means with a random subset of k columns.  Returns a matrix whose
    columns are the resulting centers."""
    rospy.loginfo('compressing to %d centers' % k)
    seed_cols = np.random.permutation(fmat.shape[1])[0:k]
    seeds = fmat[:, seed_cols]
    # vq.kmeans returns (codebook, distortion); keep only the codebook.
    centers = vq.kmeans(np.array(fmat.T), np.array(seeds.T))[0]
    return np.matrix(centers).T
def compress_pkl(surf_path_complete):
    """Load a .surf_pkl feature pickle, compress its descriptors down to 1000
    k-means centers, and save them next to the input as a .surf_sm_pkl file."""
    features_list = ut.load_pickle(surf_path_complete)
    rospy.loginfo('making matrix')
    fmat = features_mat(features_list)
    rospy.loginfo('compressing')
    reduced_features = features_mat_compress(fmat, 1000)
    # Derive the output name from this function's own argument instead of the
    # module-level 'path_complete' global the old code silently depended on
    # (which made the function unusable outside the __main__ script).
    base = surf_path_complete
    if base.endswith('.surf_pkl'):
        base = base[:-len('.surf_pkl')]
    small_pickle_fname = base + '.surf_sm_pkl'
    ut.save_pickle(reduced_features, small_pickle_fname)
    rospy.loginfo('saved to %s' % small_pickle_fname)
if __name__ == '__main__':
    ##
    # "compresss" large pkls
    # Single bag path on the command line; its .surf_pkl sibling is
    # compressed into a .surf_sm_pkl.
    path_complete = sys.argv[1]
    #for path_complete in csv_bag_names(fname):
    surf_path_complete = gen_pkl_name(path_complete, ext=".surf_pkl")
    rospy.loginfo('loading %s' % surf_path_complete)
    compress_pkl(surf_path_complete)
    rospy.loginfo('done')
    exit()

    # Leftover scratch code from earlier experiments, kept for reference:
    #forearm_cam_l = '/l_forearm_cam/image_rect_color'
    #ws_l = '/wide_stereo/left/image_rect_color'
    #ws_r = '/wide_stereo/right/image_rect_color'

    #features_list = find_image_features(sys.argv[1], forearm_cam_l)
    #Find all features in all videos that we have
    #list_of_features_list = []
    #list_of_features_list.append(reduced_features)
    #break

    #ut.save_pickle(list_of_features_list, 'reduced_all_features.pkl')
    #pdb.set_trace()
    #Put all features into one large matrix, cluster...
    #for message_t, surf in list_of_features_list:
    #    keypoints, descriptors = surf

    #Kmean wants row vectors
    #What happens if we have many duplicated points?
    #whitened = vq.whiten(features)
    #book = np.array((whitened[0],whitened[2]))
    #vq.kmeans(whitened, book)
| [
[
1,
0,
0.0088,
0.0088,
0,
0.66,
0,
312,
0,
1,
0,
0,
312,
0,
0
],
[
1,
0,
0.0175,
0.0088,
0,
0.66,
0.0435,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0175,
0.0088,
0,
... | [
"import csv",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"from cv_bridge.cv_bridge import CvBridge, CvBridgeError",
"import rospy",
"import cv",
"import sys",
"import hrl_lib.rutils as ru",
"import hrl_lib.tf_utils as tfu",
"import t... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import glob
import hrl_lib.rutils as ru
import hrl_pr2_lib.devices as hpr2
import hai_sandbox.bag_processor as bp
import hrl_lib.util as ut
import visualization_msgs.msg as vm
import numpy as np
import hrl_lib.viz as viz
import hai_sandbox.dimreduce as dimreduce
import sensor_msgs.msg as sm
import scipy.spatial as sp
import pdb
import scipy.stats as kde
import pr2_msgs.msg as pm
import threading
##
# Takes in a folder, outputs a dictionary organized by data by contact segment,
# topic, list of {'left', 'right', 't'}
#
def success_failure_classification_preprocess(folder_name):
    """Load every .bag in folder_name and organize its pressure data by
    behavior segment.

    Returns data_dict[segment_index][pressure_topic] = list (one entry per
    bag) of {'left': mat, 'right': mat, 't': times}.  Segments are delimited
    by /imitate_behavior_marker messages, two marker stamps per segment.
    """
    data_dict = None
    #counter = 0
    for bag_name in glob.glob('%s/*.bag' % folder_name):
        print 'Loading bag %s' % bag_name
        topics_dict = ru.bag_sel(bag_name,
                                    ['/imitate_behavior_marker',
                                    '/pressure/l_gripper_motor',
                                    '/pressure/r_gripper_motor',
                                    '/accelerometer/l_gripper_motor',
                                    '/accelerometer/r_gripper_motor',
                                    '/joint_states',
                                    '/l_cart/command_pose',
                                    '/r_cart/command_pose'])

        ##break each mat into segments based on states
        #/imitate_behavior_marker
        #topics_dict['/imitate_behavior_marker']['t']
        print 'There are %d marker messages.' % len(topics_dict['/imitate_behavior_marker']['msg'])
        # Group marker timestamps into pairs: each pair brackets one segment.
        time_segments = [[]]
        for marker_msg in topics_dict['/imitate_behavior_marker']['msg']:
            if len(time_segments[-1]) >= 2:
                time_segments.append([])
            time_segments[-1].append(marker_msg.header.stamp.to_time())

        if data_dict == None:
            data_dict = {}
            #organize by segment, then by type, then by what that type will store
            for i in range(len(time_segments)):
                data_dict[i] = {}

        ###pressure mat with times
        ##/pressure/l_gripper_motor
        ##/pressure/r_gripper_motor
        for ptop in ['/pressure/l_gripper_motor', '/pressure/r_gripper_motor']:
            p = topics_dict[ptop]
            psegs = bp.segment_msgs(time_segments, p['msg'])
            print '>> Separated records of %s (%s) into %d segments' % (ptop, bag_name, len(psegs))
            #convert segments into mats
            for i, pseg in enumerate(psegs):
                #each col is an example in left-f and right_f
                left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pseg)

                if not data_dict[i].has_key(ptop):
                    data_dict[i][ptop] = []
                data_dict[i][ptop].append({'left': left_f, 'right': right_f, 't': ptimes})

    # Sanity print: number of bags recorded per segment and topic.
    for ptop in ['/pressure/l_gripper_motor', '/pressure/r_gripper_motor']:
        for k in data_dict.keys():
            print '>>', k, ptop, len(data_dict[k][ptop])

    return data_dict
def select_time(data, time_rec, time_start, time_end):
    """Return the columns of data whose timestamps lie in
    [time_start, time_end).  time_rec holds one timestamp per column."""
    in_window = np.multiply(time_rec >= time_start, time_rec < time_end)
    window_cols = np.where(in_window)[0]
    return data[:, window_cols]
def break_record_matrices_into_chunks(data_dict, segmented_matrices, \
        minimal_trial_lengths, chunk_times, topic):
    """Slice each record's per-state matrix into fixed-duration time chunks.

    Returns data_sets[state][chunk_idx] = {'data': list of sub-matrices (one
    per record), 'time': [chunk_start, chunk_end]} with chunk boundaries laid
    out every chunk_times[state] seconds up to minimal_trial_lengths[state].
    """
    #For each state
    data_sets = {}
    for state in range(len(data_dict.keys())):
        data_sets[state] = {}
        time_starts = np.arange(0, minimal_trial_lengths[state], chunk_times[state])
        time_ends = np.arange(chunk_times[state], minimal_trial_lengths[state] + chunk_times[state], chunk_times[state])
        #Go over all the records
        # NOTE(review): record count comes from data_dict but indexing goes
        # into segmented_matrices -- assumes both hold the same records in
        # the same order; confirm at the call sites.
        for record_number in range(len(data_dict[state][topic])):
            time_rec = segmented_matrices[record_number]['t'][state]
            # Zero each record's clock at its own first sample.
            time_rec = time_rec - time_rec[0]

            #Break them up into little chunks
            for tidx in range(len(time_starts)):
                if not data_sets[state].has_key(tidx):
                    data_sets[state][tidx] = {}
                    data_sets[state][tidx]['data'] = []
                    data_sets[state][tidx]['time'] = [time_starts[tidx], time_ends[tidx]]
                #pdb.set_trace()
                data_chunk = select_time(segmented_matrices[record_number]['mat'][state], time_rec,
                        time_starts[tidx], time_ends[tidx])
                data_sets[state][tidx]['data'].append(data_chunk)
    return data_sets
class TimeSeriesVectorizer:
    """Buffers vectorized ROS messages per topic in a rolling time window.

    self.channels[topic] is either [None] (nothing received yet) or
    [data_matrix, time_matrix, RLock] where columns of data_matrix line up
    with timestamps in time_matrix.  The lock guards concurrent access from
    the subscriber callback thread and readers.
    """
    def __init__(self):
        # topic -> [data, times, lock] ring buffer (see class docstring)
        self.channels = {}
        # topic -> rolling-window length in seconds
        self.buffer_lengths = {}

    def register_listener(self, vectorize_func, msg_type, topic, buffer_length_secs):
        """Subscribe to topic, turning each msg into a column vector via
        vectorize_func and keeping only the last buffer_length_secs of data."""
        self.buffer_lengths[topic] = buffer_length_secs

        def listener_func(msg):
            # Vectorize the message and stamp it.
            amat = vectorize_func(msg)
            t = np.matrix([msg.header.stamp.to_time()])
            got_lock = False
            if self.channels[topic][0] == None:
                # First message: create the buffer and its lock.
                self.channels[topic] = [amat, t, threading.RLock()]
            else:
                lock = self.channels[topic][2]
                lock.acquire()
                got_lock = True
                #print 'l locked'
                # Append the new column to both the data and time matrices.
                new_record = [np.column_stack((self.channels[topic][0], amat)),
                              np.column_stack((self.channels[topic][1], t)),
                              lock]
                #print 'got something', new_record[0].shape
                self.channels[topic] = new_record
                #print 'after appending', self.channels[topic][0].shape, self.channels[topic][1].shape
                #print 'time recorded is', t[0,0]
                #print 'shape', self.channels[topic][0].shape
                #lock.release()
                #print 'l released'

            # Re-fetch the lock (it may have just been created above) and make
            # sure it is held exactly once before trimming the window.
            lock = self.channels[topic][2]
            if not got_lock:
                lock.acquire()
            #lock.acquire()
            #select only messages n-seconds ago
            n_seconds_ago = t[0,0] - buffer_length_secs
            records_in_range = (np.where(self.channels[topic][1] >= n_seconds_ago)[1]).A1
            #print records_in_range, self.channels[topic][0].shape
            self.channels[topic][0] = self.channels[topic][0][:, records_in_range]
            self.channels[topic][1] = self.channels[topic][1][:, records_in_range]
            #print 'after shortening', self.channels[topic][0].shape, self.channels[topic][1].shape
            #print 'shape after selection...', self.channels[topic][0].shape
            lock.release()

        # Mark the channel as "registered, no data yet" BEFORE subscribing so
        # the callback always finds an entry.
        self.channels[topic] = [None]
        rospy.Subscriber(topic, msg_type, listener_func)

    def get_n_steps(self, topic, timestart, nsteps, wait=True):
        """Return the first nsteps data columns with timestamps > timestart.

        Blocks (polling at 100 Hz) until enough samples arrive, unless
        wait=False, in which case None is returned when too few are buffered.
        Raises RuntimeError if timestart predates the buffered window.
        """
        #print 'timestart is', timestart
        if self.channels[topic][0] != None:
            if timestart < self.channels[topic][1][0,0]:
                # [0,0] is the earliest, and [0,-1] is the latest
                #print self.channels[topic][1][0,0], self.channels[topic][1][0,-1], self.channels[topic][1].shape
                #print 'diff', self.channels[topic][1][0,0] - timestart
                #print 'diff', self.channels[topic][1][0,-1] - timestart
                #print timestart, self.channels[topic][1][0,0]
                raise RuntimeError('timestart <= self.channels[topic][1][0,0]')

        r = rospy.Rate(100)
        selected = None
        while selected == None:
            # No data yet on this channel: keep polling.
            if self.channels[topic][0] == None:
                r.sleep()
                continue

            lock = self.channels[topic][2]
            lock.acquire()
            #print 'g locked'
            #print self.channels[topic][0].shape
            record_idxs = (np.where(self.channels[topic][1] > timestart)[1]).A1
            #print self.channels[topic][0].shape, self.channels[topic][1].shape
            #print record_idxs
            #print record_idxs, self.channels[topic][0].shape
            records_from_time = self.channels[topic][0][:, record_idxs]
            #print 'records_from_time', records_from_time.shape, 'need', nsteps
            #print '>>'
            selected = records_from_time[:, :nsteps]
            lock.release()
            #print 'g released'
            #r.sleep()
            if selected.shape[1] < nsteps:
                # Not enough samples yet: give up or keep waiting.
                if not wait:
                    return None
                else:
                    selected = None
                    r.sleep()
            else:
                return selected

    def get_range(self, topic, time_start, time_end):
        """Return buffered data columns with timestamps in (time_start, time_end].
        NOTE(review): reads the buffer without taking the channel lock."""
        times = self.channels[topic][1]
        selected = self.channels[topic][0][:, np.where((times > time_start) * (times <= time_end))]
        return selected
#class SuccessProbabilityEstimator:
class TestOnlineClassification:
def __init__(self):
rospy.init_node('success_classifier')
self.vectorizer = TimeSeriesVectorizer()
def pressure_vectorizer(pmsg):
return np.row_stack((np.matrix((pmsg.l_finger_tip)).T, np.matrix((pmsg.r_finger_tip)).T))
self.vectorizer.register_listener(pressure_vectorizer, pm.PressureState, '/pressure/l_gripper_motor', 15.)
def start_classifying(self):
#print 'start_classifying'
segment_idx = 0
segment_lengths = [10, 10, 10]
n_segments = len(segment_lengths)
for segment_idx in range(n_segments):
n_steps = segment_lengths[segment_idx]
#print 'getting_n_steps'
selected = self.vectorizer.get_n_steps('/pressure/l_gripper_motor', \
rospy.get_rostime().to_time(), n_steps)
print 'selected.shpae', selected.shape
print selected
print 'done!'
class TimeSeriesClassifier:
    """Success/failure classifier for gripper pressure time series.

    A trial is split into behavior states, each state into fixed-duration
    chunks; per (state, chunk) a PCA projection plus success/failure gaussian
    KDEs are stored in self.models = {'models': ..., 'chunk_params': ...}.
    """
    def __init__(self):
        rospy.init_node('time_series_classifier')
        self.vectorizer = TimeSeriesVectorizer()
        def pressure_vectorizer(pmsg):
            # Stack left and right finger-tip pressures into one column vector.
            return np.row_stack((np.matrix((pmsg.l_finger_tip)).T, np.matrix((pmsg.r_finger_tip)).T))
        self.vectorizer.register_listener(pressure_vectorizer, pm.PressureState, \
                '/pressure/l_gripper_motor', 15.)

    #TEST this when you have robot time!
    def run(self):
        """Classify live data for every (state, chunk) of the loaded models.
        NOTE(review): probability() requires a 'successp' argument that this
        call omits -- as written this raises TypeError; confirm intent."""
        #models = self.models['models']
        for state in range(len(self.models['models'])):
            for chunk_idx in range(len(self.models['models'][state])):
                xmat = self.fetch_data(state, chunk_idx)
                print self.probability(xmat, state, chunk_idx)

    def fetch_data(self, state, chunk_idx):
        """Pull the live pressure window sized for (state, chunk_idx)."""
        chunk_params = self.models['chunk_params']
        n_steps = chunk_params['chunk_dim'][state][chunk_idx]
        x_mat = self.vectorizer.get_n_steps('/pressure/l_gripper_motor', \
                rospy.get_rostime().to_time(), n_steps)
        return x_mat

    def probability(self, x_mat, state, chunk_idx, successp):
        """Posterior probability (via the stored KDEs and label priors) that
        x_mat belongs to the success class (successp=True) or failure class
        (successp=False) for the given (state, chunk_idx)."""
        models = self.models['models']
        chunk_params = self.models['chunk_params']
        #Subtract out the mean
        x_vec = np.reshape(x_mat, (x_mat.shape[0] * x_mat.shape[1], 1)) - models[state][chunk_idx]['mean']
        #project
        projected_x = np.array((models[state][chunk_idx]['project'].T * x_vec).T)

        succ_prob = models[state][chunk_idx]['kde'][0].evaluate(np.array(projected_x))
        fail_prob = models[state][chunk_idx]['kde'][1].evaluate(np.array(projected_x))
        # Class priors from the stored training labels.
        succ_total = np.sum(models[state][chunk_idx]['labels']) / float(models[state][chunk_idx]['labels'].shape[1])
        fail_total = 1 - succ_total
        if successp:
            prob = (succ_prob * succ_total) / ((fail_prob*fail_total) + (succ_total*succ_prob))
        else:
            prob = (fail_prob * fail_total) / ((fail_prob*fail_total) + (succ_total*succ_prob))
        #print succ_prob, fail_prob, fail_total, succ_total
        n_steps = chunk_params['chunk_dim'][state][chunk_idx]
        print 'frame size %d state %d chunk %d prob %.3f' % (n_steps, state, chunk_idx, prob)
        return prob

    def save_models(self, name='timeseries_pca_model.pkl'):
        """Pickle self.models to name."""
        print 'saving models'
        ut.save_pickle(self.models, name)

    def load_models(self, name='timeseries_pca_model.pkl'):
        """Unpickle self.models from name and rebuild the per-(state, chunk)
        success/failure gaussian KDEs (KDE objects are not pickled)."""
        print 'loading models'
        self.models = ut.load_pickle(name)
        #print self.models.__class__, self.models.keys()
        models = self.models['models']
        for state in range(len(models.keys())):
            for chunk_idx in range(len(models[state])):
                #print models.keys()
                reduced_data = models[state][chunk_idx]['reduced']
                #labels = np.column_stack((np.matrix(np.ones((1, num_pos))), np.matrix(np.zeros((1, num_neg)))))
                # Split the reduced training data by its stored labels.
                success_data = reduced_data[:, (np.where(models[state][chunk_idx]['labels'] > 0))[1].A1]
                failure_data = reduced_data[:, (np.where(models[state][chunk_idx]['labels'] == 0))[1].A1]
                models[state][chunk_idx]['kde'] = [kde.gaussian_kde(np.array(success_data)),
                                                   kde.gaussian_kde(np.array(failure_data))]
                #models[state][chunk_idx]['tree'] = sp.KDTree(np.array(reduced_data.T))

    def create_model(self, succ_pickle, fail_pickle):
        """Train self.models from pickled success and failure trial sets:
        chunk the trials, combine classes, then fit PCA per (state, chunk)."""
        print 'creating model...'
        topic = '/pressure/l_gripper_motor'
        SEGMENT_LENGTH = 1.0
        VARIANCE_KEEP = .7

        # load in pickle
        print 'loading pickles'
        successes = ut.load_pickle(succ_pickle)
        failures = ut.load_pickle(fail_pickle)

        #chop data set into little chunks
        # data_sets[state][chunk_idx]['data', 'time'][chunk_record]
        print 'preprocess pickles'
        success_data_sets, failure_data_sets, chunk_params = self.preprocess_pickles(successes, \
                failures, topic, SEGMENT_LENGTH)
        # turn each set of records into a matrix
        combined_sets = {}
        for dset_name, datasets in zip(['success', 'failure'], [success_data_sets, failure_data_sets]):
            #merge the two matrices from mat_set
            mat_set = self.create_matrix_from_chunked_datasets(datasets)
            for state in range(len(datasets.keys())):
                if not combined_sets.has_key(state):
                    combined_sets[state] = {}
                for chunk_idx in range(len(datasets[state])):
                    if not combined_sets[state].has_key(chunk_idx):
                        combined_sets[state][chunk_idx] = {}
                    combined_sets[state][chunk_idx][dset_name] = mat_set[state][chunk_idx]['data']
                    combined_sets[state][chunk_idx]['time'] = mat_set[state][chunk_idx]['time']

        # run PCA over the entire set
        models = {}
        for state in range(len(combined_sets.keys())):
            models[state] = []
            for chunk_idx in range(len(combined_sets[state])):
                print 'building model for state', state, 'chunk idx', chunk_idx
                # pdb.set_trace()
                # Successes first, failures after; labels below match this order.
                data_chunk = np.column_stack((combined_sets[state][chunk_idx]['success'], \
                                              combined_sets[state][chunk_idx]['failure']))
                num_pos = combined_sets[state][chunk_idx]['success'].shape[1]
                num_neg = combined_sets[state][chunk_idx]['failure'].shape[1]
                labels = np.column_stack((np.matrix(np.ones((1, num_pos))), np.matrix(np.zeros((1, num_neg)))))
                projection_basis, dmean = dimreduce.pca_vectors(data_chunk, VARIANCE_KEEP)
                print 'pca_basis: number of dimensions', projection_basis.shape[1]
                reduced_data = projection_basis.T * (data_chunk-dmean)
                models[state].append({'time': combined_sets[state][chunk_idx]['time'],
                                      'project': projection_basis,
                                      'reduced': reduced_data,
                                      'labels': labels,
                                      'mean': dmean,
                                      'data': data_chunk
                                      #'tree': sp.KDTree(np.array(reduced_data.T))
                                      })

        self.models = {'models':models,
                       'chunk_params': chunk_params}

    def create_matrix_from_chunked_datasets(self, datasets):
        """Flatten each (state, chunk) list of sub-matrices into one matrix:
        each record's chunk is reshaped to a single column."""
        mat_set = {}
        for state in range(len(datasets.keys())):
            mat_set[state] = {}
            for chunk_idx in range(len(datasets[state])):
                records_l = []
                for chunk_record in range(len(datasets[state][chunk_idx]['data'])):
                    a = datasets[state][chunk_idx]['data'][chunk_record]
                    records_l.append(np.reshape(a, (a.shape[0]*a.shape[1], 1)))

                mat_set[state][chunk_idx] = {}
                mat_set[state][chunk_idx]['data'] = np.column_stack(records_l)
                mat_set[state][chunk_idx]['time'] = datasets[state][chunk_idx]['time']
        return mat_set

    def preprocess_pickles(self, successes, failures, topic, segment_length):
        """Chunk success and failure trials into time windows of roughly
        segment_length seconds and truncate every chunk to the shortest
        sample count seen across both classes, so dimensions match."""
        #Break matrices into segments based on state
        # list of list of 44xN matrices
        success_matrices_segmented_by_state = construct_list_of_segmented_matrices_from_trial_recording(successes, topic)
        failure_matrices_segmented_by_state = construct_list_of_segmented_matrices_from_trial_recording(failures, topic)

        #Calculate how long each chunk needs to be
        success_trial_durations = find_trial_durations(successes, topic) # trial_durations[state][trial number]
        failure_trial_durations = find_trial_durations(failures, topic) # trial_durations[state][trial number]
        #for state in range(len(times_dict.keys())):
        #    durations = [state_times[-1] - state_times[0] for state_times in times_dict[state]]
        #pdb.set_trace()
        # NOTE(review): np.min(a, b) treats the second argument as the AXIS,
        # not a second value -- this line likely intends
        # min(np.min(succ), np.min(fail)); confirm and fix.
        minimal_trial_lengths = [np.min(np.min(success_trial_durations[state]), np.min(failure_trial_durations[state])) \
                for state in range(len(success_trial_durations.keys()))]
        chunk_times = [length/np.floor(length/segment_length) for length in minimal_trial_lengths]

        #make little chunks out of the matrices of pressure readings
        success_data_sets = break_record_matrices_into_chunks(successes, success_matrices_segmented_by_state, \
                minimal_trial_lengths, chunk_times, topic)
        failure_data_sets = break_record_matrices_into_chunks(failures, failure_matrices_segmented_by_state, \
                minimal_trial_lengths, chunk_times, topic)

        #Make sure the little chunks are of the same dimension across positive and negative examples
        chunk_dim = {}
        for state in range(len(successes.keys())):
            chunk_dim[state] = {}
            for chunk_idx in range(len(success_data_sets[state])):
                #figure minimum chunk lengths in array size
                chunk_length_successes = [success_data_sets[state][chunk_idx]['data'][chunk_record].shape[1] \
                        for chunk_record in range(len(success_data_sets[state][chunk_idx]['data']))]
                chunk_length_failures = [failure_data_sets[state][chunk_idx]['data'][chunk_record].shape[1] \
                        for chunk_record in range(len(failure_data_sets[state][chunk_idx]['data']))]
                #pdb.set_trace()
                shortest_chunk_length = np.min([np.min(chunk_length_successes), np.min(chunk_length_failures)])
                chunk_dim[state][chunk_idx] = shortest_chunk_length

        #shorten data to shortest_chunk_length
        #if state == 0 and chunk_idx == 2:
        #    pdb.set_trace()
        for state in range(len(successes.keys())):
            for chunk_idx in range(len(success_data_sets[state])):
                for dataset_idx, data_sets in enumerate([success_data_sets, failure_data_sets]):
                    for chunk_record in range(len(data_sets[state][chunk_idx]['data'])):
                        shortest_chunk_length = chunk_dim[state][chunk_idx]
                        data_sets[state][chunk_idx]['data'][chunk_record] = \
                                data_sets[state][chunk_idx]['data'][chunk_record][:,:shortest_chunk_length]
                        #if state == 0 and chunk_idx == 2:
                        #    #pdb.set_trace()
                        #    print data_sets[state][chunk_idx].__class__, 'len(data_sets[state][chunk_idx])', len(data_sets[state][chunk_idx]), 'dataset idx', dataset_idx, 'shortest_chunk_length', shortest_chunk_length, 'shape', data_sets[state][chunk_idx]['data'][chunk_record].shape

        chunk_params = {'chunk_dim': chunk_dim,
                        'trial_lengths': minimal_trial_lengths,
                        'chunk_times': chunk_times,
                        'topic': topic}
        return success_data_sets, failure_data_sets, chunk_params

    def preprocess_individual_pickle(self, apickle):
        """Chunk one recorded trial pickle the same way the training data was
        chunked (using the stored chunk_params), so it can be classified."""
        data = ut.load_pickle(apickle)
        #models, chunk_params = self.models
        models = self.models['models']
        chunk_params = self.models['chunk_params']
        #break pickle into chunks given model.
        #chunks of equivalent time, chunks of equivalent dimensions
        data_segmented = construct_list_of_segmented_matrices_from_trial_recording(data, chunk_params['topic'])
        # 1) break into time chunks
        chunked_data = break_record_matrices_into_chunks(data, data_segmented, \
                chunk_params['trial_lengths'], chunk_params['chunk_times'], chunk_params['topic'])
        # 2) shorten into appropriate dimensions
        for state in range(len(models)):
            for chunk_idx in range(len(models[state])):
                for chunk_record in range(len(chunked_data[state][chunk_idx]['data'])):
                    chunk_length = chunk_params['chunk_dim'][state][chunk_idx]
                    chunked_data[state][chunk_idx]['data'][chunk_record] =\
                            chunked_data[state][chunk_idx]['data'][chunk_record][:, :chunk_length]
        return chunked_data

    def classify_pickle(self, apickle):
        """Classify every record in apickle per (state, chunk) using the
        stored PCA + KDE models, printing per-chunk decisions and summary
        success rates."""
        # a pickle can have multiple records...
        chunked_data = self.preprocess_individual_pickle(apickle)
        #mat_set = self.create_matrix_from_chunked_datasets(chunked_data)
        models = self.models['models']
        # Class priors derived from the first model's stored labels.
        total_ex = models[0][0]['labels'].shape[1]
        pos_ex = np.sum(models[0][0]['labels'])
        neg_ex = total_ex - pos_ex
        prior_pos = pos_ex / float(total_ex)
        prior_neg = neg_ex / float(total_ex)

        results = {}
        NEIGHBORS = 3
        for record_idx in range(len(chunked_data[0][0]['data'])):
            for state in range(len(models)):
                if not results.has_key(state):
                    results[state] = {}
                for chunk_idx in range(len(models[state])):
                    if not results[state].has_key(chunk_idx):
                        results[state][chunk_idx] = []
                    # Reshape the chunk to one column, then project with PCA.
                    x_mat = chunked_data[state][chunk_idx]['data'][record_idx]
                    x_vec = np.reshape(x_mat, (x_mat.shape[0] * x_mat.shape[1], 1))
                    projected_x = np.array((models[state][chunk_idx]['project'].T * x_vec).T)
                    #match_idx = models[state][chunk_idx]['tree'].query(projected_x, NEIGHBORS)[1]

                    #success_density = estimate_density(models[state][chunk_idx]['prob_trees'][0], projected_x)
                    #failure_density = estimate_density(models[state][chunk_idx]['prob_trees'][1], projected_x)
                    # p(x | suc) * p (suc) / sum(), p (x | fail)
                    succ_prob = models[state][chunk_idx]['kde'][0].evaluate(np.array(projected_x))
                    fail_prob = models[state][chunk_idx]['kde'][1].evaluate(np.array(projected_x))
                    succ_total = np.sum(models[state][chunk_idx]['labels']) / float(models[state][chunk_idx]['labels'].shape[1])
                    fail_total = 1 - succ_total
                    prob = (succ_prob * succ_total) / ((fail_prob*fail_total) + (succ_total*succ_prob))
                    if np.isnan(prob):
                        prob = 0.
                    results[state][chunk_idx].append(prob > .5)
                    print 'record idx %d state %d chunk %d label prob %.2f success? %d' % (record_idx, state, chunk_idx, prob, prob > .5)

        print '============================='
        for state in range(len(models)):
            for chunk_idx in range(len(models[state])):
                correct = np.sum(results[state][chunk_idx])
                all_val = float(len(results[state][chunk_idx]))
                print all_val
                print 'state %d chunk %d results %.3f' % (state, chunk_idx, correct/all_val)
def zero_out_time_in_trials(data_dict, topic):
    """Return times_dict[state] = list of per-record time vectors, each
    shifted by the corresponding record's state-0 start time."""
    #print 'Finding times...'
    #pdb.set_trace()
    times_dict = {} # times_dict[state][ list of durations ]
    for state in range(len(data_dict.keys())):
        times_dict[state] = []
        for record_number in range(len(data_dict[state][topic])):
            # NOTE(review): the start time is read from state 0
            # (data_dict[0]) rather than 'state', so every state is zeroed
            # against the trial's overall start; compare find_trial_durations
            # which indexes data_dict[state] -- confirm this asymmetry is
            # intentional.
            time_start = data_dict[0][topic][record_number]['t'][0]
            #pdb.set_trace()
            times_dict[state].append(data_dict[state][topic][record_number]['t'] - time_start)
    return times_dict
def find_trial_durations(data_dict, topic):
    """Return durations[state] = list of each record's duration (last
    timestamp minus first) for the given topic."""
    durations = {} # durations[state][ list of durations ]
    for state in range(len(data_dict.keys())):
        per_state = []
        for record in data_dict[state][topic]:
            stamps = record['t']
            per_state.append(stamps[-1] - stamps[0])
        durations[state] = per_state
    return durations
def construct_pressure_marker_message(data_dict, topic, base_color=np.matrix([1.,0, 0, 1.]).T):
    """Build a colored point cloud visualizing pressure trials laid out on a
    shared timeline, with states placed one after another.

    @param data_dict  [state number][topic][trial number] -> {'t','left','right'}
    @param topic      pressure topic to visualize
    @param base_color 4x1 RGBA matrix (currently unused by the body below)
    @return a PointCloud message colored by pressure value
    """
    #record_number = 0
    STATE_SEPARATION_DIST = .4
    points_ll = []
    colors_ll = []
    pressures_l = []
    #Record the duration of each trial
    times_dict = zero_out_time_in_trials(data_dict, topic)
    #Use the durations to figure out offsets
    # state_time_offsets[s] = where state s starts on the shared timeline and
    # how long its longest trial runs; entry -1 seeds the recurrence.
    state_time_offsets = {}
    state_time_offsets[-1] = {'duration':0, 'offset':0}
    for state in range(len(times_dict.keys())):
        durations = [state_times[-1] - state_times[0] for state_times in times_dict[state]]
        duration_state = np.max(durations)
        state_time_offsets[state] = {'duration': duration_state,
                                     'offset': state_time_offsets[state-1]['offset'] + state_time_offsets[state-1]['duration'] + STATE_SEPARATION_DIST}
        print 'state', state, 'offset', state_time_offsets[state]['offset'], 'duration', state_time_offsets[state]['duration']
    #Create corrected timelines
    # For each trial, re-zero every state's times and shift by its state offset.
    times_m_list = []
    for record_number in range(len(data_dict[0][topic])):
        #For each state figure out the time offset & store in times_l
        times_l = []
        for i in range(len(data_dict.keys())):
            #times_l.append(np.matrix(state_time_offsets[i]['offset'] + data_dict[i][topic][record_number]['t'] - data_dict[0][topic][record_number]['t'][0]))
            curr_times = data_dict[i][topic][record_number]['t']
            curr_times = curr_times - curr_times[0]
            times_l.append(np.matrix(curr_times + state_time_offsets[i]['offset']))
            #times_l.append(np.matrix( - data_dict[0][topic][record_number]['t'][0]))
        #Stack times_l to form times_m
        times_m = np.column_stack(times_l)
        times_m_list.append(times_m)
    print 'constructing segmented matrices...'
    # Concatenate each trial's per-state matrices and subtract the first
    # column so pressures are relative to the trial's initial reading.
    pressure_mats = []
    for lp in construct_list_of_segmented_matrices_from_trial_recording(data_dict, topic):
        p = np.column_stack(lp)
        p = p - p[:,0]
        pressure_mats.append(p)
    print 'creating colored points...'
    pressures, all_points, colors_mat = create_colored_3d_points_from_matrices(pressure_mats, times_m_list)
    print 'creating pointcloud message'
    #point_cloud = ru.np_to_colored_pointcloud(all_points, np.matrix(pressures) + min_pval, 'pressure_viz')
    point_cloud = ru.np_to_colored_pointcloud(all_points, np.matrix(pressures), 'pressure_viz')
    return point_cloud
class DisplayDataWithRviz:
    """Publish success/failure pressure recordings as point clouds so both
    can be compared side by side in rviz."""
    def __init__(self):
        rospy.init_node('display_pressure_with_rviz')
        # Marker publishers are created but only the point clouds are
        # published in display() below.
        self.succ_marker = rospy.Publisher('succ_marker', vm.Marker)
        self.fail_marker = rospy.Publisher('fail_marker', vm.Marker)
        self.succ_pc_pub = rospy.Publisher('succ_pc', sm.PointCloud)
        self.fail_pc_pub = rospy.Publisher('fail_pc', sm.PointCloud)
    def display(self, succ_pickle, fail_pickle):
        """Load two recording pickles, build one colored cloud for each, and
        publish both at 10 Hz until shutdown.

        @param succ_pickle path to pickle of successful trials
        @param fail_pickle path to pickle of failed trials
        """
        # load in pickle
        print 'loading...'
        successes = ut.load_pickle(succ_pickle)
        failures = ut.load_pickle(fail_pickle)
        # Interactively let the user pick which recorded topic to visualize.
        print 'Enter the topic number:'
        for i, k in enumerate(successes[0].keys()):
            print i, k
        topic = successes[0].keys()[int(raw_input())]
        red = np.matrix([1.,0, 0, 1.]).T
        green = np.matrix([0.,1., 0, 1.]).T
        print 'construct_pressure_marker_message(successes, topic, green)'
        #succ_marker, succ_pc = construct_pressure_marker_message(successes, topic, green)
        succ_pc = construct_pressure_marker_message(successes, topic, green)
        print 'construct_pressure_marker_message(failures, topic, red)'
        #fail_marker, fail_pc = construct_pressure_marker_message(failures, topic, red)
        fail_pc = construct_pressure_marker_message(failures, topic, red)
        print 'publishing...'
        r = rospy.Rate(10)
        while not rospy.is_shutdown():
            self.succ_pc_pub.publish(succ_pc)
            self.fail_pc_pub.publish(fail_pc)
            r.sleep()
##
# Create matrices split based on state
#
# @param data_dict [state number] [topic] [trial number] ['t' 'left' 'right']
# @param topic string
# @return segmented_matrices[record_number][state_number]['t'] => 1xN array
# ['mat'] => 44xN mat
def construct_list_of_segmented_matrices_from_trial_recording(data_dict, topic):
    """Create matrices split based on state.

    @param data_dict [state number][topic][trial number] -> {'t','left','right'}
    @param topic string
    @return list indexed by trial of {'mat': [per-state matrix with 'left'
            rows stacked above 'right' rows], 't': [per-state time array]}
    """
    segmented_matrices = []
    for record_number in range(len(data_dict[0][topic])):
        segmented_matrix = []
        trecs = []
        for state in range(len(data_dict.keys())):
            rec = data_dict[state][topic][record_number]
            # np.vstack replaces np.row_stack (alias removed in NumPy 2.0);
            # semantics are identical.
            segmented_matrix.append(np.vstack((rec['left'], rec['right'])))
            trecs.append(rec['t'])
        segmented_matrices.append({'mat': segmented_matrix,
                                   't': trecs})
    return segmented_matrices
#def construct_marker_message_from_list_of_segmented_matrices(segmented_matrix, slot_number, column_index):
def create_colored_3d_points_from_matrices(matrices, index_list):
    """Lay each matrix out as a flat strip of 3D points in the z=0 plane.

    The x coordinate comes from the matrix row index (strips are spaced apart
    along x), the y coordinate from the matching column of index_list
    (typically timestamps), and z is always zero.

    @param matrices   list of (r x c) value matrices
    @param index_list list of (1 x c) matrices giving each column's y value
    @return (1 x N values, 3 x N points, 4 x N zeroed colors), N = sum(r*c)
    """
    points3d_l = []
    colors_ll = []
    mat_l = []
    X_MULTIPLIER = 1/15.
    for i, mat in enumerate(matrices):
        X, Y = np.meshgrid(range(mat.shape[0]), range(mat.shape[1]))
        x_size = mat.shape[0] * X_MULTIPLIER
        # Shift strip i right by i strip-widths plus a one-third-width gap.
        X = np.matrix(X * X_MULTIPLIER) + x_size * i + (i * x_size / 3.)
        #Y = (np.matrix(np.ones((mat.shape[0], 1))) * times_m).T
        Y = (np.matrix(np.ones((mat.shape[0], 1))) * index_list[i]).T
        Z = np.matrix(np.zeros(mat.shape)).T
        # np.vstack replaces np.row_stack (alias removed in NumPy 2.0).
        points = np.vstack((X.reshape(1, X.shape[0] * X.shape[1]),
                            Y.reshape(1, Y.shape[0] * Y.shape[1]),
                            Z.reshape(1, Z.shape[0] * Z.shape[1])))
        colors = np.matrix(np.zeros((4, mat.shape[0]*mat.shape[1])))
        mat_l.append(mat.T.reshape((1,mat.shape[1] * mat.shape[0])))
        points3d_l.append(points)
        colors_ll.append(colors)
    all_mats = np.column_stack(mat_l)
    all_points = np.column_stack(points3d_l)
    all_colors = np.column_stack(colors_ll)
    return all_mats, all_points, all_colors
#data_dict [state number] [topic] [trial number] ['t' 'left' 'right']
def average_reading_over_trials(data_dict, topic):
    """Average the pressure readings of all trials, per state.

    @param data_dict [state number][topic][trial number] -> {'t','left','right'}
    @param topic     topic whose recordings are averaged
    @return {state: {topic: [single averaged {'t','left','right'} record]}}
    """
    #Construct list of list of matrices indexing [state][trial number] => contact information for both fingers
    contact_info = {}
    for state in data_dict.keys():
        contact_info[state] = []
        for trial in data_dict[state][topic]:
            # np.vstack replaces np.row_stack (alias removed in NumPy 2.0).
            contact_info[state].append(np.vstack((trial['left'], trial['right'])))
    ret_dict = {}
    #shorten the trials to be the length of the shortest trial
    for state in contact_info.keys():
        shortest_length = np.min([trial.shape[1] for trial in contact_info[state]])
        trimmed_mats = [trial[:,:shortest_length] for trial in contact_info[state]]
        # Stack trials along a third axis and average across it.
        avg_reading = np.matrix(np.sum(np.concatenate([np.reshape(np.array(trial), (trial.shape[0], trial.shape[1], 1)) for trial in trimmed_mats], 2), 2) / len(trimmed_mats))
        # BUG FIX: use integer division -- a float (e.g. 22.0) from '/2.' is
        # not a valid slice index for numpy arrays.
        div_point = avg_reading.shape[0] // 2
        assert(div_point == 22)
        ret_dict[state] = {topic: [{'t': data_dict[state][topic][0]['t'][:shortest_length],
                                    'left': avg_reading[:div_point,:],
                                    'right': avg_reading[div_point:,:]}] }
    return ret_dict
def subtract_records(recorda, recordb, topic):
    """Elementwise absolute difference of two averaged recordings, per state,
    truncated to the shorter of the two.

    @param recorda, recordb dicts shaped like average_reading_over_trials output
    @param topic            topic to diff
    @return same shape as the inputs, holding |a - b| readings
    """
    diffed = {}
    for state in recorda.keys():
        rec_a = recorda[state][topic][0]
        rec_b = recordb[state][topic][0]
        n = min(rec_a['left'].shape[1], rec_b['left'].shape[1])
        diffed[state] = {topic: [{
            't': rec_a['t'][:n],
            'left': np.abs(rec_a['left'][:, :n] - rec_b['left'][:, :n]),
            'right': np.abs(rec_a['right'][:, :n] - rec_b['right'][:, :n])
        }]}
    return diffed
#Debug this!
class DiffDisplay:
    """Publish averaged success/failure recordings and their difference as
    rviz markers and point clouds.

    NOTE(review): construct_pressure_marker_message (above) returns a single
    point cloud, so the two-value unpacks in display() would raise
    ValueError -- consistent with the '#Debug this!' note preceding this
    class.  Confirm the intended return before relying on it.
    """
    def __init__(self):
        rospy.init_node('diff_display')
        self.fail_marker = rospy.Publisher('diff_fail_avg', vm.Marker)
        self.fail_pc_pub = rospy.Publisher('diff_fail_pc', sm.PointCloud)
        self.succ_marker = rospy.Publisher('diff_succ_avg', vm.Marker)
        self.succ_pc_pub = rospy.Publisher('diff_succ_pc', sm.PointCloud)
        self.diff_marker = rospy.Publisher('diff_avg', vm.Marker)
        self.diff_pc_pub = rospy.Publisher('diff_pc', sm.PointCloud)
    def display(self, succ_pickle, fail_pickle):
        """Average each pickle's trials, diff the averages, and publish all
        three visualizations at 10 Hz until shutdown.

        @param succ_pickle path to pickle of successful trials
        @param fail_pickle path to pickle of failed trials
        """
        # load in pickle
        print 'loading...'
        successes = ut.load_pickle(succ_pickle)
        failures = ut.load_pickle(fail_pickle)
        topics = ['/pressure/l_gripper_motor', '/pressure/r_gripper_motor']
        topic = topics[0]
        red = np.matrix([1., 0, 0, 1.]).T
        green = np.matrix([0., 1., 0, 1.]).T
        blue = np.matrix([0., 0, 1., 1.]).T
        #data_dict [state number] [topic] [trial number] ['t' 'left' 'right']
        succ_avg = average_reading_over_trials(successes, topic)
        fail_avg = average_reading_over_trials(failures, topic)
        diff_avg = subtract_records(succ_avg, fail_avg, topic)
        # NOTE(review): see class docstring -- these unpacks look broken.
        succ_marker, succ_pc = construct_pressure_marker_message(succ_avg, topic, green)
        fail_marker, fail_pc = construct_pressure_marker_message(fail_avg, topic, red)
        diff_marker, diff_pc = construct_pressure_marker_message(diff_avg, topic, blue)
        r = rospy.Rate(10)
        print 'publishing...'
        while not rospy.is_shutdown():
            self.succ_marker.publish(succ_marker)
            self.fail_marker.publish(fail_marker)
            self.succ_pc_pub.publish(succ_pc)
            self.fail_pc_pub.publish(fail_pc)
            self.diff_marker.publish(diff_marker)
            self.diff_pc_pub.publish(diff_pc)
            r.sleep()
if __name__ == '__main__':
    import sys
    # Command-line dispatch: the first argument picks the mode, the rest are
    # mode-specific paths.
    if 'preprocess' == sys.argv[1]:
        # Convert success/failure bag directories into pickles.
        print 'Loading success bags..'
        succ_dict = success_failure_classification_preprocess(sys.argv[2])
        print 'Saving success dict.'
        ut.save_pickle(succ_dict, '%s/success_data.pkl' % sys.argv[2])
        print 'Loading failure bags..'
        fail_dict = success_failure_classification_preprocess(sys.argv[3])
        print 'Saving failure dict.'
        ut.save_pickle(fail_dict, '%s/failure_data.pkl' % sys.argv[3])
        print 'Done!'
    if 'learn' == sys.argv[1]:
        # Train the classifier from success (argv[2]) and failure (argv[3]) pickles.
        classifier = TimeSeriesClassifier()
        classifier.create_model(sys.argv[2], sys.argv[3])
        classifier.save_models()
    if 'test' == sys.argv[1]:
        # Evaluate saved models against a recorded pickle.
        classifier = TimeSeriesClassifier()
        classifier.load_models()
        classifier.classify_pickle(sys.argv[2])
    #Debug this to display failure cases
    if 'display' == sys.argv[1]:
        d = DisplayDataWithRviz()
        #data_dict [state number] [topic] [trial number] ['t' 'left' 'right']
        d.display(sys.argv[2], sys.argv[3])
    #Debug this so that it displays the average value, and the diff between the averages.
    if 'diff' == sys.argv[1]:
        d = DiffDisplay()
        #data_dict [state number] [topic] [trial number] ['t' 'left' 'right']
        d.display(sys.argv[2], sys.argv[3])
    if 'run' == sys.argv[1]:
        # Run online classification with previously saved models.
        #t = TestOnlineClassification()
        #t.start_classifying()
        t = TimeSeriesClassifier()
        t.load_models()
        t.run()
#class VectorTimeSeriesClassifier:
#
# def __init__(self):
# self.data = None
# self.times = None
# self.start_time = None
#
# def add_data(self, t, mat):
# if self.data == None:
# self.data = mat
# self.times = [t]
# else:
# self.data = np.column_stack((self.data, mat))
# self.times.append(t)
#
# def start_collecting_data(self):
# self.start_time = rospy.get_rostime().to_time()
#
# def get_data(self, start_time_in_segment):
# #option 2
#    #instead of getting n data points back, can we interpolate?
# #given start time, end time, and number of points needed
# #we can interpolate to get those n points
#
# #option 1
# np.array(self.times) - rospy.get_rostime().to_time()
# current_time = rospy.get_rostime().to_time() - self.start_time
# start_time_in_segment
#need to keep track of time since start
#need to put data points into windows of data
#want data in consecutive matrices of the same size as what the classifier will
# expect if we're still using the same
# format
#a)
#collect messages into data
#break up chunks of data but keep it as one large matrix
#b)
#consume data chunks as long as there is enough
#get a message with a time after when we should expect messages
#spit out a chunk when there are enough data points
### how do we combine this event based classifier with RL?
## This seems better
# might get into dead locks?
# can we run this in one process?
# rl agent executes a state
# while classifier listen for messages
# rl agent checks for state estimate.
#
# rl agent executes a state
# waits for estimate...
# take state estimate then act.
| [
[
1,
0,
0.0012,
0.0012,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0012,
0.0012,
0,
0.66,
0.0303,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0023,
0.0012,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import glob",
"import hrl_lib.rutils as ru",
"import hrl_pr2_lib.devices as hpr2",
"import hai_sandbox.bag_processor as bp",
"import hrl_lib.util as ut",
"import visualization... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import rosrecord
import sys
# Quick sanity check: print the topic and time of the first 10 messages of
# the bag given on the command line.
f = open(sys.argv[1])
i = 0
# Note: the loop variable 'time' shadows any imported time module here.
for topic, message, time in rosrecord.logplayer(f):
    i = i + 1
    print topic, time
    if i > 10:
        break
f.close()
##
# In this bag, give me messages from these topics
# @param file_name
# @param topics
def bag_reader(file_name, topics):
    """Yield the bag's (topic, message, time) records restricted to *topics*.

    @param file_name path of the bag file
    @param topics    list of topic names to keep
    """
    wanted = {}
    for topic_name in topics:
        wanted[topic_name] = True
    bag_file = open(file_name)
    for record in rosrecord.logplayer(bag_file):
        # record[0] is the topic name.
        if record[0] in wanted:
            yield record
    bag_file.close()
| [
[
1,
0,
0.0357,
0.0357,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0357,
0.0357,
0,
0.66,
0.1111,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0714,
0.0357,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import rosrecord",
"import sys",
"f = open(sys.argv[1])",
"i = 0",
"for topic, message, time in rosrecord.logplayer(f):\n i = i + 1\n print(topic, time)\n if i > 10:\n ... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import feature_extractor_fpfh.msg as fmsg
import hrl_lib.rutils as ru
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
class KinectListener:
    """Blocking client for FPFH histogram messages.

    Wraps a GenericListener and repackages each FPFHHist message into numpy
    matrices plus an OpenCV image.
    """
    def __init__(self, topic=None):
        # Default to the 'fpfh_hist' topic when none is given.
        if topic == None:
            topic = 'fpfh_hist'
        rate = .2
        self.listener = ru.GenericListener('kinect_client', fmsg.FPFHHist, topic, rate)
        self.bridge = CvBridge()
    def read(self):
        """Block until a message stamped after 'now' arrives, then return a
        dict with 'histogram' (33 x n), 'hpoints3d' (3 x n), 'points3d'
        (3 x m) and 'image' (bgr8 cv image).
        """
        cur_time = rospy.Time.now().to_sec()
        not_fresh = True
        # Spin until the listener hands us a message newer than cur_time;
        # stale messages are logged and discarded.
        while not_fresh:
            fpfh_hist = self.listener.read(allow_duplication=False, willing_to_wait=True, warn=False, quiet=True)
            if not (fpfh_hist.header.stamp.to_sec() < cur_time):
                not_fresh = False
            else:
                rospy.loginfo("fpfh message time is in the past by %.2f secs"% (cur_time - fpfh_hist.header.stamp.to_sec()))
        # Reshape the flat message arrays into (features x points) matrices.
        histogram = np.matrix(fpfh_hist.histograms).reshape((fpfh_hist.hist_npoints, 33)).T
        hist_points = np.matrix(fpfh_hist.hpoints3d).reshape((fpfh_hist.hist_npoints, 3)).T
        points3d = np.matrix(fpfh_hist.origpoints).reshape((fpfh_hist.original_npoints, 3)).T
        # Keep columns selected by the non-NaN mask (presumably dropping
        # invalid depth readings -- confirm the column-index semantics).
        points3d = points3d[:, np.where(1-np.isnan(points3d))[1].A1]
        cvimage_mat = self.bridge.imgmsg_to_cv(fpfh_hist.image, 'bgr8')
        return {'histogram': histogram, 'hpoints3d': hist_points, 'points3d': points3d, 'image': cvimage_mat}
| [
[
1,
0,
0.0286,
0.0286,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0286,
0.0286,
0,
0.66,
0.1429,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0857,
0.0286,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import feature_extractor_fpfh.msg as fmsg",
"import hrl_lib.rutils as ru",
"from cv_bridge import CvBridge, CvBridgeError",
"import numpy as np",
"class KinectListener:\n def... |
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import cv
import sys
import hrl_lib.rutils as ru
import hai_sandbox.features as fea
# Topic names for the forearm and wide-stereo cameras.
forearm_cam_l = '/l_forearm_cam/image_rect_color'
ws_l = '/wide_stereo/left/image_rect_color'
ws_r = '/wide_stereo/right/image_rect_color'
fname = sys.argv[1]
bridge = CvBridge()
cv.NamedWindow('surf', 1)
cv.NamedWindow('harris', 1)
cv.NamedWindow('star', 1)
# Replay the left wide-stereo stream and overlay SURF (blue) and Harris
# (green) features on each frame.  The 'star' window is created but unused.
for topic, msg, t in ru.bag_iter(fname, [ws_l]):
    image = bridge.imgmsg_to_cv(msg, 'bgr8')
    image_gray = fea.grayscale(image)
    surf_keypoints, surf_descriptors = fea.surf(image_gray)
    cv.ShowImage('surf', fea.draw_surf(image, surf_keypoints, (255, 0, 0)))
    harris_keypoints = fea.harris(image_gray)
    cv.ShowImage('harris', fea.draw_harris(image, harris_keypoints, (0, 255, 0)))
    cv.WaitKey(10)
| [
[
1,
0,
0.0333,
0.0333,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0333,
0.0333,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0667,
0.0333,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"from cv_bridge.cv_bridge import CvBridge, CvBridgeError",
"import cv",
"import sys",
"import hrl_lib.rutils as ru",
"import hai_sandbox.features as fea",
"forearm_cam_l = '/l_forearm_cam/image_... |
import roslib; roslib.load_manifest('hai_sandbox')
import cv
import sys
import hai_sandbox.features as fea
if __name__ == '__main__':
    # Load an image, extract STAR / SURF / Harris features, and show each
    # detector's output in its own window until ESC is pressed.
    fname = sys.argv[1]
    image = cv.LoadImage(fname)
    # NOTE(review): the grayscale buffer is hard-coded to 640x480; images of
    # other sizes would break CvtColor -- confirm input size assumption.
    image_gray = cv.CreateImage((640,480), cv.IPL_DEPTH_8U,1)
    cv.CvtColor(image, image_gray, cv.CV_BGR2GRAY)
    star_keypoints = fea.star(image)
    surf_keypoints, surf_descriptors = fea.surf(image_gray)
    harris_keypoints = fea.harris(image_gray)
    cv.NamedWindow('surf', 1)
    cv.NamedWindow('harris', 1)
    cv.NamedWindow('star', 1)
    while True:
        cv.ShowImage('surf', fea.draw_surf(image, surf_keypoints, (255, 0, 0)))
        cv.ShowImage('harris', fea.draw_harris(image, harris_keypoints, (0, 255, 0)))
        cv.ShowImage('star', fea.draw_star(image, star_keypoints, (0, 0, 255)))
        # 27 == ESC key code.
        k = cv.WaitKey(33)
        if k == 27:
            break
#Canny(image, edges, threshold1, threshold2, aperture_size=3) => None
| [
[
1,
0,
0.037,
0.037,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.037,
0.037,
0,
0.66,
0.2,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0741,
0.037,
0,
0.66,
... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import cv",
"import sys",
"import hai_sandbox.features as fea",
"if __name__ == '__main__':\n fname = sys.argv[1]\n image = cv.LoadImage(fname)\n image_gray = cv.CreateImage((640,480), cv.... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_lib.util as ut
import csv
import scipy.spatial as sp
import hrl_camera.ros_camera as cam
import hai_sandbox.features as fea
import numpy as np
import cv
import hrl_lib.rutils as ru
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import scipy.cluster.vq as vq
def csv_bag_names(fname):
    """Yield each row of the CSV file *fname* (each row is a list of strings)."""
    listing_file = open(fname)
    reader = csv.reader(listing_file)
    for row in reader:
        yield row
    listing_file.close()
def features_mat_compress(fmat, k):
    """Compress a (d x n) feature matrix down to k k-means cluster centers.

    @param fmat matrix whose columns are feature vectors
    @param k    number of centers to keep
    @return (d x k) matrix of cluster centers
    """
    rospy.loginfo('compressing to %d centers' % k)
    # Seed k-means with k randomly chosen columns of fmat.
    seed_cols = np.random.permutation(fmat.shape[1])[0:k]
    seeds = fmat[:, seed_cols]
    codebook, _distortion = vq.kmeans(np.array(fmat.T), np.array(seeds.T))
    return np.matrix(codebook).T
if __name__ == '__main__':
    import sys
    import pdb
    # Build a compressed SURF-descriptor database from pickled features, then
    # replay a bag and color each frame's keypoints by their nearest database
    # feature; frames are saved as forearm_cam<N>.png.
    features_file = sys.argv[1]
    images_file = sys.argv[2]
    features_db = np.column_stack([ut.load_pickle(p[0]) for p in csv_bag_names(features_file)])
    features_db_reduced = features_mat_compress(features_db, 500)
    #Generate a random color for each feature
    colors = np.matrix(np.random.randint(0, 255, (3, features_db_reduced.shape[1])))
    features_tree = sp.KDTree(np.array(features_db_reduced.T))
    bridge = CvBridge()
    forearm_cam_l = '/l_forearm_cam/image_rect_color'
    cv.NamedWindow('surf', 1)
    #import pdb
    #while not rospy.is_shutdown():
    i = 0
    for topic, msg, t in ru.bag_iter(images_file, [forearm_cam_l]):
        image = bridge.imgmsg_to_cv(msg, 'bgr8')
        #image = camera.get_frame()
        image_gray = fea.grayscale(image)
        surf_keypoints, surf_descriptors = fea.surf(image_gray)
        #print len(surf_keypoints)
        #pdb.set_trace()
        #match each keypoint with one in our db & look up color
        matching_idx = [features_tree.query(d)[1] for d in surf_descriptors]
        coordinated_colors = colors[:, matching_idx]
        #nimage = fea.draw_surf(image, surf_keypoints, (0,255,0))
        nimage = fea.draw_surf2(image, surf_keypoints, coordinated_colors)
        cv.ShowImage('surf', nimage)
        cv.SaveImage('forearm_cam%d.png' % i, nimage)
        i = i + 1
        cv.WaitKey(10)
    #rospy.init_node('test11')
    #camera = cam.ROSImageClient(forearm_cam_l)
| [
[
1,
0,
0.0139,
0.0139,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0139,
0.0139,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0278,
0.0139,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hrl_lib.util as ut",
"import csv",
"import scipy.spatial as sp",
"import hrl_camera.ros_camera as cam",
"import hai_sandbox.features as fea",
"import numpy as np",
"i... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_lib.util as ut
import sys
if __name__ == '__main__':
    # Load the pickle named on the command line (for interactive inspection).
    p = ut.load_pickle(sys.argv[1])
| [
[
1,
0,
0.125,
0.125,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.125,
0.125,
0,
0.66,
0.2,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.25,
0.125,
0,
0.66,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hrl_lib.util as ut",
"import sys",
"if __name__ == '__main__':\n p = ut.load_pickle(sys.argv[1])",
" p = ut.load_pickle(sys.argv[1])"
] |
import roslib; roslib.load_manifest('hai_sandbox')
import cv
import numpy as np
import scipy.spatial as sp
import math
class SURFMatcher:
    """KD-tree database of SURF descriptors from labeled model images, with
    nearest-neighbor matching via Lowe's ratio test."""
    def __init__(self):
        self.model_images = {}
        self.model_fea = {}

    def add_file(self, model_name, label):
        """Load an image from disk and add its features under *label*."""
        model_img = cv.LoadImage(model_name)
        self.add_model(model_img, label)

    def add_model(self, model_img, label):
        """Extract SURF locations/descriptors from *model_img* and store them."""
        mgray = grayscale(model_img)
        m_loc, m_desc = surf(mgray)
        self.model_images[label] = model_img
        self.model_fea[label] = {'loc': m_loc, 'desc': m_desc}

    def build_db(self):
        """Build the searchable database (labels, locations, KD-tree) from
        every model added so far."""
        fea_l = []
        labels_l = []
        locs_l = []
        for k in self.model_fea:
            fea_l.append(np.array(self.model_fea[k]['desc']))
            locs_l.append(np.array(self.model_fea[k]['loc']))
            labels_l.append(np.array([k for i in range(len(self.model_fea[k]['desc']))]))
        # np.vstack replaces np.row_stack (alias removed in NumPy 2.0).
        self.labels = np.vstack(labels_l)
        self.locs = np.vstack(locs_l)
        self.tree = sp.KDTree(np.vstack(fea_l))

    def match(self, desc, thres=.6):
        """Match *desc* against the database using Lowe's ratio test.

        @param desc  query SURF descriptor
        @param thres max allowed nearest/second-nearest distance ratio
        @return (matched descriptor, matched location) or None if ambiguous
        """
        dists, idxs = self.tree.query(np.array(desc), 2)
        ratio = dists[0] / dists[1]
        # BUG FIX: this compared against an undefined name 'threshold',
        # raising NameError; the parameter is 'thres'.
        if ratio < thres:
            desc = self.tree.data[idxs[0]]
            loc = self.locs[idxs[0]]
            return desc, loc
        else:
            return None
def concat_images(a, b):
    """Return a new image with *a* and *b* placed side by side (a on the left)."""
    out_height = max(a.height, b.height)
    combined = cv.CreateImage((a.width+b.width, out_height), a.depth, a.channels)
    left_roi = cv.GetSubRect(combined, (0, 0, a.width, a.height))
    right_roi = cv.GetSubRect(combined, (a.width, 0, b.width, b.height))
    # Add each source into its (zero-initialized) region of the output.
    cv.Add(a, left_roi, left_roi)
    cv.Add(b, right_roi, right_roi)
    return combined
def clone(something):
    """Copy a cv matrix or image, picking the matching OpenCV clone call."""
    if something.__class__ == cv.cvmat:
        return cv.CloneMat(something)
    return cv.CloneImage(something)
def draw_surf(image, keypoints, color):
    """Draw SURF keypoints on a copy of *image*: a circle scaled to keypoint
    size, a filled center dot, and a line showing the dominant direction.

    @param keypoints list of (loc, laplacian, size, direction_deg, hessian)
    @param color     BGR tuple used for all drawing
    @return annotated copy of the image
    """
    rimage = clone(image)
    for loc, lap, size, d, hess in keypoints:
        loc = tuple(np.array(np.round(loc), dtype='int').tolist())
        circ_rad = int(round(size/4.))
        cv.Circle(rimage, loc, circ_rad, color, 1, cv.CV_AA)
        cv.Circle(rimage, loc, 2, color, -1, cv.CV_AA)
        drad = math.radians(d)
        line_len = circ_rad
        # Line end = center + radius * (cos, sin) of the direction angle.
        loc_end = (np.matrix(np.round( circ_rad * np.matrix([np.cos(drad), np.sin(drad)]).T + np.matrix(loc).T), dtype='int')).A1.tolist()
        cv.Line(rimage, loc, tuple(loc_end), color, thickness=1, lineType=cv.CV_AA)
    return rimage
def draw_surf2(image, keypoints, colors):
    """Draw each SURF keypoint as a fixed-radius circle, one color per point.

    @param colors matrix whose column i is the BGR color for keypoint i
    @return annotated copy of the image
    """
    canvas = clone(image)
    for idx, keypoint in enumerate(keypoints):
        loc, lap, size, d, hess = keypoint
        center = tuple(np.array(np.round(loc), dtype='int').tolist())
        col = tuple(np.matrix(colors[:,idx], dtype='int').T.A1)
        bgr = (int(col[0]), int(col[1]), int(col[2]))
        cv.Circle(canvas, center, 5, bgr, 1, cv.CV_AA)
    return canvas
def draw_harris(image, keypoints, color):
    """Mark each Harris corner with a small circle of the given color."""
    canvas = clone(image)
    for point in keypoints:
        center = tuple(np.array(np.round(point), dtype='int').tolist())
        cv.Circle(canvas, center, 5, color, 1, cv.CV_AA)
    return canvas
def draw_star(image, keypoints, color):
    """Draw STAR keypoints, scaling the color intensity by each keypoint's
    response relative to the min/max response seen.

    @param keypoints list of ((x, y), size, response)
    @param color     base BGR color; scaled per keypoint
    @return annotated copy of the image
    """
    rimage = clone(image)
    color_arr = np.array(color)
    max_resp = - 999999
    min_resp = 999999
    for _, _, response in keypoints:
        max_resp = max(response, max_resp)
        min_resp = min(response, min_resp)
    range_resp = max_resp - min_resp
    # BUG FIX: when all keypoints share one response value (or there is a
    # single keypoint), range_resp is 0 and the weight below divided by zero.
    if range_resp == 0:
        range_resp = 1.
    for loc, size, response in keypoints:
        loc = tuple(np.array(np.round(loc), dtype='int').tolist())
        color_weight = ((response - min_resp) / range_resp)
        c = tuple((color_weight * color_arr).tolist())
        cv.Circle(rimage, loc, int(round(size/2.0)), c, 1, cv.CV_AA)
    return rimage
#list of ((x,y), size, response)
def star(image):
    """Detect STAR keypoints; returns a list of ((x, y), size, response)."""
    storage = cv.CreateMemStorage()
    keypoints = cv.GetStarKeypoints(image, storage)
    del storage
    return keypoints
##
# surf_keypoints => keypoints (x,y), laplacian, size, direction , hessian
# surf_descriptors => list of len 128 lists
def surf(image_gray, params=(1, 3000, 3, 4)):
    """Extract SURF features from a grayscale image.

    @return (keypoints, descriptors): keypoints are
            ((x, y), laplacian, size, direction, hessian), descriptors are
            length-128 lists
    """
    storage = cv.CreateMemStorage()
    result = cv.ExtractSURF(image_gray, None, storage, params)
    del storage
    return result
##
# @param image image
# @param params surf params
def surf_color(image, params=(1, 3000, 3, 4)):
    """Convert a color image to grayscale, then run SURF on it.

    @param image  color input image
    @param params SURF parameters passed straight through
    """
    return surf(grayscale(image), params)
##
# list of (x, y)
def harris(image_gray):
    """Harris corner detection: return up to 300 (x, y) corners with quality
    level .1 and minimum corner distance 1.0."""
    # Scratch buffers required by GoodFeaturesToTrack.
    eig_image = cv.CreateImage(cv.GetSize(image_gray), cv.IPL_DEPTH_32F, 1)
    temp_image = cv.CreateImage(cv.GetSize(image_gray), cv.IPL_DEPTH_32F, 1)
    return cv.GoodFeaturesToTrack(image_gray, eig_image, temp_image, 300, .1, 1.0, useHarris = True) #list of (x,y)
def grayscale(image):
    """Return an 8-bit single-channel grayscale copy of a BGR image."""
    gray = cv.CreateImage(cv.GetSize(image), cv.IPL_DEPTH_8U, 1)
    cv.CvtColor(image, gray, cv.CV_BGR2GRAY)
    return gray
| [
[
1,
0,
0.007,
0.007,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.007,
0.007,
0,
0.66,
0.0588,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.014,
0.007,
0,
0.66,
... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import cv",
"import numpy as np",
"import scipy.spatial as sp",
"import math",
"class SURFMatcher:\n def __init__(self):\n self.model_images = {}\n self.model_fea = {}\n\n def... |
import roslib; roslib.load_manifest('hai_sandbox')
import cv
import sys
import os.path as pt
img_path = sys.argv[1]
print 'loading', img_path
img = cv.LoadImageM(img_path)
dst = cv.CloneMat(img)
dif = cv.CloneMat(img)
# High-pass filter: subtract a heavily blurred (91-pixel Gaussian) version
# of the image from the original.
cv.Smooth(img, dst, cv.CV_GAUSSIAN, 91)
cv.Sub(img, dst, dif)
# WARNING: saves the result over the input path, overwriting the original.
cv.SaveImage(img_path, dif)
#orig_path, fname = pt.split(img_path)
#name = pt.splitext(fname)[0]
#pt.join(orig_path, name)
| [
[
1,
0,
0.05,
0.05,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.05,
0.05,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1,
0.05,
0,
0.66,
0.166... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import cv",
"import sys",
"import os.path as pt",
"img_path = sys.argv[1]",
"print('loading', img_path)",
"img = cv.LoadImageM(img_path)",
"dst = cv.CloneMat(img)",
"dif = cv.CloneMat(img)"... |
import roslib; roslib.load_manifest('hai_sandbox')
import hrl_lib.util as ut
import pylab as pb
import numpy as np
import pdb
def conf_to_percent(rec):
    """Normalize a 2x2 confusion matrix and return (TN rate, TP rate).

    rec is {'mat': 2x2 counts, 'neg': negative count, 'pos': positive count}.
    Note: normalizes rec['mat'] in place.
    """
    confusion = rec['mat']
    confusion[0,:] = confusion[0,:] / rec['neg']
    confusion[1,:] = confusion[1,:] / rec['pos']
    return confusion[0,0], confusion[1,1]
def plot_classifier_performance(fname, pname, plot_all):
    """Plot true-negative (figure 1) and true-positive (figure 2) rates over
    iterations from one results pickle.

    @param fname    results pickle with 'train_set_statistics', optional
                    'current_scan_statistics', 'perf_on_other_scans' and
                    optional 'converged_at_iter'
    @param pname    label used in the plot legend
    @param plot_all also plot per-dataset curves (dashed)
    """
    results = ut.load_pickle(fname)
    #results['train_set_statistics'] # [ {'conf', 'size'}, {}...]
    #results['current_scan_statistics'] # [ {'conf'} {}...]
    #results['perf_on_other_scans'] # [[{'name', 'conf'}, {}...] [{} {}...]...]
    #where conf is {'mat', 'neg', 'pos'}
    scores = {}
    for rlist in results['perf_on_other_scans']:
        for d in rlist:
            if d['name'] in scores:
                scores[d['name']].append(conf_to_percent(d['conf']))
            else:
                scores[d['name']] = [conf_to_percent(d['conf'])]
    # Transpose each dataset's [(tn, tp), ...] into ([tn...], [tp...]).
    for k in scores.keys():
        scores[k] = zip(*scores[k])
    if 'train_set_statistics' in results:
        train_neg, train_pos = zip(*[conf_to_percent(d['conf']) for d in results['train_set_statistics']])
    else:
        train_neg = train_pos = None
    if 'current_scan_statistics' in results:
        # BUG FIX: removed a stray pdb.set_trace() left from debugging -- it
        # froze the script whenever test-set statistics were present.
        test_neg, test_pos = zip(*[conf_to_percent(d['conf']) for d in results['current_scan_statistics']])
    else:
        test_neg = test_pos = None
    n_iterations = np.array(range(len(results['train_set_statistics'])))
    #======================================================================
    pb.figure(1)
    if 'train_set_statistics' in results:
        pb.plot(n_iterations, train_neg, label='train ' + pname)
    if test_neg is not None:
        pb.plot(n_iterations, test_neg, label='test ' + pname)
    if plot_all:
        for i, k in enumerate(scores.keys()):
            pb.plot(n_iterations, scores[k][0], '--', label=str(i))
    # Mark the iteration where learning converged, when recorded.
    if 'converged_at_iter' in results:
        pb.plot([results['converged_at_iter'], results['converged_at_iter']], [0., 1.], 'r')
    pb.title('True negatives')
    pb.legend()
    #======================================================================
    pb.figure(2)
    if train_pos is not None:
        pb.plot(n_iterations, train_pos, label='train ' + pname)
    if test_pos is not None:
        pb.plot(n_iterations, test_pos, label='test ' + pname)
    print('mapping from dataset to id')
    if plot_all:
        for i, k in enumerate(scores.keys()):
            pb.plot(n_iterations, scores[k][1], '--', label=str(i))
            print('ID %s dataset %s' % (i, k))
    if 'converged_at_iter' in results:
        pb.plot([results['converged_at_iter'], results['converged_at_iter']], [0., 1.], 'r')
    pb.title('True positives')
    pb.legend()
def plot_features_perf(fnames, pnames):
    """Compare feature sets: bar-chart true-positive (figure 1) and
    true-negative (figure 2) rates per dataset, one bar group per feature set.

    @param fnames results pickles, one per feature set
    @param pnames display names matching fnames
    """
    all_scores = {}
    dset_names = None
    for fname, pname in zip(fnames, pnames):
        results = ut.load_pickle(fname)
        train_neg, train_pos = zip(*[conf_to_percent(d['conf']) for d in results['train_set_statistics']])
        scores = {}
        for rlist in results['perf_on_other_scans']:
            for d in rlist:
                if scores.has_key(d['name']):
                    scores[d['name']].append(conf_to_percent(d['conf']))
                else:
                    scores[d['name']] = [conf_to_percent(d['conf'])]
        # Transpose each dataset's [(tn, tp), ...] into ([tn...], [tp...]).
        for k in scores.keys():
            scores[k] = zip(*scores[k])
        scores['train'] = [(train_neg), (train_pos)]
        all_scores[pname] = scores
        # Dataset names are taken from the first pickle processed.
        if dset_names == None:
            dset_names = scores.keys()
    # Collect, per dataset, the first TN value of every feature set.
    neg_by_dset = {}
    for n in dset_names:
        posn = []
        for pname in pnames:
            posn.append(all_scores[pname][n][0][0])
        neg_by_dset[n] = posn
    # Same for TP values.
    pos_by_dset = {}
    for n in dset_names:
        posn = []
        for pname in pnames:
            posn.append(all_scores[pname][n][1][0])
        pos_by_dset[n] = posn
    ind = np.arange(len(pnames))
    width = 0.05
    fig = pb.figure(1)
    ax = fig.add_subplot(111)
    rects=[]
    for i, name in enumerate(dset_names):
        rect = ax.bar(ind+(width*i), pos_by_dset[name], width, color=tuple(np.random.rand(3).tolist()))
        rects.append(rect)
    ax.set_ylabel('accuracy')
    ax.set_title('True positives by dataset and features used')
    ax.set_xticks(ind+width)
    ax.set_xticklabels(tuple(pnames))
    fig = pb.figure(2)
    ax = fig.add_subplot(111)
    rects=[]
    for i, name in enumerate(dset_names):
        rect = ax.bar(ind+(width*i), neg_by_dset[name], width, color=tuple(np.random.rand(3).tolist()))
        rects.append(rect)
    ax.set_ylabel('accuracy')
    ax.set_title('True negatives by dataset and features used')
    ax.set_xticks(ind+width)
    ax.set_xticklabels(tuple(pnames))
if __name__ == '__main__':
    import sys
    import optparse
    # -m picks the plot mode; -f/-n may repeat to compare several result files.
    p = optparse.OptionParser()
    p.add_option("-m", "--mode", action="store", type="string")
    p.add_option("-f", "--file", action="append", type="string")
    p.add_option('-n', '--name', action="append", type="string")
    opt, args = p.parse_args()
    if opt.mode == 'active':
        # Per-dataset curves are only drawn when a single file is given.
        if len(opt.file) <= 1:
            plot_all = True
        else:
            plot_all = False
        for i in range(len(opt.file)):
            plot_classifier_performance(opt.file[i], opt.name[i], plot_all)
        pb.show()
    if opt.mode == 'features':
        plot_features_perf(opt.file, opt.name)
        pb.show()
#For comparing between different algorithms, don't need to plot performance on all scans just
| [
[
1,
0,
0.0057,
0.0057,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0057,
0.0057,
0,
0.66,
0.1111,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0115,
0.0057,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import hrl_lib.util as ut",
"import pylab as pb",
"import numpy as np",
"import pdb",
"def conf_to_percent(rec):\n conf = rec['mat']\n conf[0,:] = conf[0,:] / rec['neg']\n conf[1,:] = co... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_lib.util as hru
import pylab as pb
import numpy as np
import itertools as it
import hrl_lib.rutils as ru
import sys
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import scipy.spatial as sp
import cv
##
# @return mat, mat, array
def contact_mat(contact_msgs):
    """Convert pressure messages to fingertip matrices plus a time array.

    @param contact_msgs messages with header.stamp and l_finger_tip /
           r_finger_tip cell arrays
    @return (left matrix, right matrix, times array) -- one column per message
    """
    #start_time = contact_msgs[0].header.stamp.to_time()
    stamps = np.array([m.header.stamp.to_time() for m in contact_msgs]) #- start_time
    finger_pairs = [[list(m.l_finger_tip), list(m.r_finger_tip)] for m in contact_msgs]
    left_rows, right_rows = zip(*finger_pairs)
    return np.matrix(left_rows).T, np.matrix(right_rows).T, stamps
##
# @return array, array
def find_contact_times(left_mat, right_mat, times):
left_mat = left_mat - left_mat[:, 0]
right_mat = right_mat - right_mat[:,0]
#When/where did contact happen?
#TODO: we are assuming just one finger of one arm here!
loc_r, time_c = np.where(np.abs(left_mat) > 250)
times_contact = times[time_c.A1]
return loc_r, times_contact
def group_by_first_el(a_list):
    """Group the elements of a_list by their first item.

    @param a_list sequence of indexable elements (e.g. [key, ...] pairs)
    @return dict mapping each first item to the list of elements starting with it
    """
    d = {}
    for el in a_list:
        # setdefault replaces dict.has_key, which was removed in Python 3.
        d.setdefault(el[0], []).append(el)
    return d
def get_closest_msgs(fname, topics, times):
    """Yield messages from a bag whose header time exactly matches one of 'times'.

    @param fname bag file name
    @param topics list of topic names to read
    @param times iterable of message times (floats) to match
    """
    times_set = set(times)
    for top, msg, t in ru.bag_iter(fname, topics):
        # O(1) set membership instead of building an intersection set per message.
        if msg.header.stamp.to_time() in times_set:
            yield msg
def find_contact_images(bag_name, contact_times, all_times, topic_name):
    """Save the image on 'topic_name' closest in time to each contact event.

    @param bag_name bag file containing the image messages
    @param contact_times times at which contact was detected
    @param all_times all image timestamps on this topic (same clock as contact_times)
    @param topic_name image topic; also used to build output file names
    """
    print('finding closest images for %s' % topic_name)
    # Each row is one instance for the KDTree; query returns ([distance], [index]).
    times_tree = sp.KDTree(np.matrix(all_times).T)
    closest_times = [all_times[times_tree.query([a_time])[1]] for a_time in contact_times]
    # Removed a leftover pdb.set_trace() breakpoint that halted every run here.
    print('getting & saving images, expecting %d images' % len(set(closest_times)))
    bridge = CvBridge()
    cleaned_topic_name = topic_name.replace('/', '')
    i = 0
    for ros_msg in get_closest_msgs(bag_name, [topic_name], closest_times):
        i = i + 1
        msg_time = ros_msg.header.stamp.to_time() - all_times[0]
        cv_image = bridge.imgmsg_to_cv(ros_msg, 'bgr8')
        img_name = "%s_%.3f_touched.png" % (cleaned_topic_name, msg_time)
        print('writing %s' % img_name)
        cv.SaveImage(img_name, cv_image)
    print('got %d images' % i)
# Script body: extract images captured at gripper-contact moments from a pair
# of bag files, then plot the left fingertip pressure readings over time.
# Usage: <script> <bag with pressure + forearm cam> <bag with wide stereo>
fname = sys.argv[1]
fname_wide = sys.argv[2]
#fname_cloud = sys.argv[3]
press_lt = '/pressure/l_gripper_motor'
press_rt = '/pressure/r_gripper_motor'
forearm_cam_l = '/l_forearm_cam/image_rect_color'
ws_l = '/wide_stereo/left/image_rect_color'
ws_r = '/wide_stereo/right/image_rect_color'
cloud_top = '/full_cloud'
print 'reading pressure messages'
#Get the pressure messages
msgs_dict = ru.bag_sel(fname, [press_lt, press_rt])
#Get the image times
print 'getting image times'
# Maps topic name -> [[topic, time], ...] for every image message seen.
all_cam_times = group_by_first_el([[top, msg.header.stamp.to_time()] for top, msg, t in ru.bag_iter(fname_wide, [ws_l, ws_r])])
all_cam_times[forearm_cam_l] = [[top, msg.header.stamp.to_time()] for top, msg, t in ru.bag_iter(fname, [forearm_cam_l])]
# NOTE(review): the next line computes a value and discards it — looks like a
# leftover debugging expression; confirm before removing.
[msg.header.stamp.to_time() for top, msg, t in ru.bag_iter(fname, ['/wide_stereo/left/image_raw'])][0:4]
print 'processing pressure'
press_lmsgs = [msg for top, msg, t in msgs_dict[press_lt]]
press_rmsgs = [msg for top, msg, t in msgs_dict[press_rt]]
#ll_mat contains (contact_loc, contact_times)
ll_mat, lr_mat, times_l = contact_mat(press_lmsgs)
rl_mat, rr_mat, times_r = contact_mat(press_rmsgs)
contact_loc, times_contact_pressure = find_contact_times(ll_mat, lr_mat, times_l)
print 'contact loc', contact_loc
#figure out which images are closest in time
#note: each row is an instance in KDTrees, query return ([distance], [indices])
import pdb
# NOTE(review): breakpoint before the slow image-extraction passes — looks
# like leftover debugging; execution stops here.
pdb.set_trace()
#Maybe just get the range of messages around contact time? +/- a couple of messages? make that a bag?
find_contact_images(fname, times_contact_pressure.copy(), [t for top, t in all_cam_times[forearm_cam_l]], forearm_cam_l)
find_contact_images(fname_wide, times_contact_pressure.copy(), [t for top, t in all_cam_times[ws_l]], ws_l)
find_contact_images(fname_wide, times_contact_pressure.copy(), [t for top, t in all_cam_times[ws_r]], ws_r)
print 'plotting'
#Plot readings
pb.figure()
for i in range(ll_mat.shape[0]):
    pb.plot(times_l, ll_mat[i,:].T.A1, label=str(i))
pb.legend()
pb.show()
| [
[
1,
0,
0.0081,
0.0081,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0081,
0.0081,
0,
0.66,
0.0213,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0163,
0.0081,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hrl_lib.util as hru",
"import pylab as pb",
"import numpy as np",
"import itertools as it",
"import hrl_lib.rutils as ru",
"import sys",
"from cv_bridge.cv_bridge imp... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import sensor_msgs.msg as sm
import hrl_lib.rutils as ru
import numpy as np
import pr2_msgs.msg as pm
import geometry_msgs.msg as gm
import tf
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import time
import hrl_lib.util as ut
import pdb
def np_to_pointcloud(points_mat, frame):
    """Convert a 3xN matrix of points into a sensor_msgs PointCloud in 'frame'."""
    cloud = sm.PointCloud()
    cloud.header.stamp = rospy.get_rostime()
    cloud.header.frame_id = frame
    for col in range(points_mat.shape[1]):
        point = gm.Point32()
        point.x = points_mat[0, col]
        point.y = points_mat[1, col]
        point.z = points_mat[2, col]
        cloud.points.append(point)
    return cloud
if __name__ == '__main__':
    # Load a (scene, contact_points) pickle and continuously publish the scene
    # cloud plus the fingertip contact locations for visualization in rviz.
    #load pickle
    import sys
    #import pdb
    pname = sys.argv[1]
    #which frame are these contact points in? (base_link)
    scene, contact_points = ut.load_pickle(pname)
    #pdb.set_trace()
    #t, tip_locs
    # [len4 list, len4 list... ]
    #plot 3D cloud & contact location! = > using? rviz?
    rospy.init_node('test10')
    contact_pub = rospy.Publisher('contact_cloud', sm.PointCloud)
    touchll_pub = rospy.Publisher('touch_ll', sm.PointCloud)
    touchlr_pub = rospy.Publisher('touch_lr', sm.PointCloud)
    # Each entry is [time, tip_locs]; indices 2 and 3 presumably pick the left
    # gripper's two fingertip positions (ordering per the recorder) — confirm.
    left_contact, right_contact = zip(*[(np.matrix(l[1][2]).T, np.matrix(l[1][3]).T) for l in contact_points])
    left_contact = np.column_stack(left_contact)
    right_contact = np.column_stack(right_contact)
    scene_pc = np_to_pointcloud(scene, 'base_footprint')
    left_con_pc = np_to_pointcloud(left_contact, 'base_footprint')
    right_con_pc = np_to_pointcloud(right_contact, 'base_footprint')
    r = rospy.Rate(10)
    rospy.loginfo('test10: publishing')
    # Republishing at 10 Hz lets late-starting rviz instances see the data.
    while not rospy.is_shutdown():
        contact_pub.publish(scene_pc)
        touchll_pub.publish(left_con_pc)
        touchlr_pub.publish(right_con_pc)
        r.sleep()
| [
[
1,
0,
0.01,
0.01,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.01,
0.01,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.02,
0.01,
0,
0.66,
0.13... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import sensor_msgs.msg as sm",
"import hrl_lib.rutils as ru",
"import numpy as np",
"import pr2_msgs.msg as pm",
"import geometry_msgs.msg as gm",
"import tf",
"import hrl_l... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import actionlib
import pr2_controllers_msgs.msg as pr2m
import trajectory_msgs.msg as tm
import sensor_msgs.msg as sm
import cv
class Arm:
    # Thin wrapper around one PR2 arm's joint trajectory action client.
    def __init__(self, name):
        # The controller publishes its joint name list on the parameter server.
        self.joint_names = rospy.get_param('/%s/joints' % name)
        self.client = actionlib.SimpleActionClient('/%s/joint_trajectory_action' % name, pr2m.JointTrajectoryAction)
        rospy.loginfo('waiting for server')
        self.client.wait_for_server()
        # Joint configurations recorded interactively (see joint_state_cb below).
        self.recorded = []
class JointTrajRecordReplay:
    """Record arm joint configurations with keypresses in an OpenCV window and
    replay them as a joint trajectory ('r' records, 'p' plays back, ESC exits)."""
    def __init__(self):
        self.left_arm = Arm('l_arm_controller')
        self.right_arm = Arm('r_arm_controller')
        # Lazily-built map from joint name to its index in /joint_states.
        self.names_index = None
        rospy.Subscriber("joint_states", sm.JointState, self.joint_state_cb)
        # The window exists only to receive keyboard input via cv.WaitKey.
        cv.NamedWindow('keyboard_input', 1)
        self.exit = False
    def rarm_goal(self, g):
        """Send a trajectory goal to the right arm and block for the result."""
        self.right_arm.client.send_goal(g)
        self.right_arm.client.wait_for_result()
        return self.right_arm.client.get_result()
    def get_joint_states(self, msg):
        """Extract (left, right) arm joint positions from a JointState message."""
        if self.names_index == None:
            # Build the name->index map once, from the first message.
            self.names_index = {}
            for i, n in enumerate(msg.name):
                self.names_index[n] = i
        positions = [[msg.position[self.names_index[n]] for n in names_list] for names_list in [self.right_arm.joint_names, self.left_arm.joint_names]]
        rpos = positions[0]
        lpos = positions[1]
        return lpos, rpos
    def construct_points(self, posl, tstep):
        """Build JointTrajectoryPoints from a list of 7-joint positions, spaced tstep seconds apart."""
        points = [tm.JointTrajectoryPoint() for i in range(len(posl))]
        for i in range(len(posl)):
            points[i].positions = posl[i]
            # Zero velocity at every waypoint (7 arm joints).
            points[i].velocities = [0 for j in range(7)]
        for i in range(len(posl)):
            points[i].time_from_start = rospy.Duration(i*tstep)
        return points
    def joint_state_cb(self, msg):
        """Per-message callback: also polls the OpenCV window for keypresses."""
        k = chr(cv.WaitKey(1) & 0xff)
        if k == 'r':
            # Record the current left/right arm configurations.
            lpos, rpos = self.get_joint_states(msg)
            self.left_arm.recorded.append(lpos)
            self.right_arm.recorded.append(rpos)
            rospy.loginfo('Recorded \nr: %s \nl: %s' % (str(rpos), str(lpos)))
        elif k == chr(27):
            # ESC: signal the main loop to exit.
            self.exit = True
        elif k == 'p':
            #Construct points
            # Play back on the right arm: current pose, recorded poses, then
            # back to the first recorded pose.
            lpos, rpos = self.get_joint_states(msg)
            rospy.loginfo('playing back')
            tstep = 2.0
            l = list(self.right_arm.recorded)
            l.append(self.right_arm.recorded[0])
            l.insert(0, rpos)
            points = self.construct_points(l, tstep)
            g = pr2m.JointTrajectoryGoal()
            g.trajectory.joint_names = self.right_arm.joint_names
            g.trajectory.points = points
            #g.trajectory.header.stamp = rospy.get_rostime() + rospy.Duration(len(l) * tstep)
            g.trajectory.header.stamp = rospy.get_rostime() + rospy.Duration(0)
            self.right_arm.client.send_goal(g)
if __name__ == '__main__':
    try:
        rospy.init_node('traj_client')
        jtr = JointTrajRecordReplay()
        r = rospy.Rate(10)
        # Spin until ESC is pressed in the OpenCV window (jtr.exit) or ROS
        # shuts down; all real work happens in the joint_states callback.
        while not rospy.is_shutdown():
            r.sleep()
            if jtr.exit:
                rospy.loginfo('exiting')
                break
    except rospy.ROSInterruptException:
        print 'prog interrupted'
# def sample_goal(self):
# points = [tm.JointTrajectoryPoint() for i in range(3)]
# points[0].positions = [-.21, .44, -.56, -1.03, -13.1, -.089, -10.1377]
# points[1].positions = [-.21, .21, -.51, -1.55, -13.18, -.856, -10.1]
# points[2].positions = [-.21, .44, -.56, -1.03, -13.1, -.089, -10.1377]
# for i in range(3):
# points[i].velocities = [0 for j in range(7)]
#
# g = pr2m.JointTrajectoryGoal()
# g.trajectory.joint_names = self.right_arm.joint_names
# g.trajectory.points = points
# g.trajectory.header.stamp = rospy.get_rostime() + rospy.Duration(3.)
# g.trajectory.points[0].time_from_start = rospy.Duration(2.0/2)
# g.trajectory.points[1].time_from_start = rospy.Duration(4.0/2)
# g.trajectory.points[2].time_from_start = rospy.Duration(6.0/2)
# return g
#
| [
[
1,
0,
0.0075,
0.0075,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0075,
0.0075,
0,
0.66,
0.1,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.015,
0.0075,
0,
0.66,
... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import actionlib",
"import pr2_controllers_msgs.msg as pr2m",
"import trajectory_msgs.msg as tm",
"import sensor_msgs.msg as sm",
"import cv",
"class Arm:\n def __init__(se... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import message_filters
from sensor_msgs.msg import Image
from sensor_msgs.msg import PointCloud2
import feature_extractor_fpfh.msg as fmsg
import pdb
def callback(image, fpfh_hist):
    """TimeSynchronizer callback: report that a synchronized message pair arrived.

    @param image sensor_msgs Image
    @param fpfh_hist the second synchronized message (currently the depth cloud)
    """
    # print() calls work in both Python 2 and 3; py2-only print statements do not.
    print("got messages!")
    #print image.header.frame_id, fpfh_hist.header.frame_id
    print('%s %s' % (image.header.stamp.to_sec(), fpfh_hist.header.stamp.to_sec()))
def fpfh_cb(fpfh):
    """Debug callback: print the FPFH message's timestamp.

    @param fpfh message with a std_msgs header
    """
    #print fpfh.header.frame_id
    # Portable print() call instead of a py2-only print statement.
    print('>> %s' % fpfh.header.stamp.to_sec())
    #pdb.set_trace()
def image_cb(image):
    """Debug callback: print the image message's timestamp.

    @param image sensor_msgs Image
    """
    #print "image", image.header.frame_id
    # Portable print() call instead of a py2-only print statement.
    print(image.header.stamp.to_sec())
# Script body: synchronize the Kinect RGB image stream with the depth point
# cloud and report each matched pair via callback().
rospy.init_node('kinect_features')
image_sub = message_filters.Subscriber('/camera/rgb/image_color', Image)
fpfh_hist_sub = message_filters.Subscriber('fpfh_hist', fmsg.FPFHHist)
depth_sub = message_filters.Subscriber('/camera/depth/points2', PointCloud2)
#ts = message_filters.TimeSynchronizer([image_sub, fpfh_hist_sub], 10)
# NOTE(review): fpfh_hist_sub is created but unused — the synchronizer
# currently pairs images with the raw depth cloud instead.
ts = message_filters.TimeSynchronizer([image_sub, depth_sub], 10)
ts.registerCallback(callback)
#rospy.Subscriber('fpfh_hist', fmsg.FPFHHist, fpfh_cb)
#rospy.Subscriber('/camera/rgb/image_color', Image, image_cb)
print 'reading and spinning!'
rospy.spin()
| [
[
1,
0,
0.0278,
0.0278,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0278,
0.0278,
0,
0.66,
0.0556,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0556,
0.0278,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import message_filters",
"from sensor_msgs.msg import Image",
"from sensor_msgs.msg import PointCloud2",
"import feature_extractor_fpfh.msg as fmsg",
"import pdb",
"def callba... |
import numpy as np
import pdb
def max_wrt_constrain(a, b, constraint_f, toler):
    """Search from a toward b for the largest value where constraint_f succeeds."""
    result = find_split(a, None, b, None, constraint_f, toler)
    return result
def min_wrt_constrain(a, b, constraint_f, toler):
    """Search from b toward a for the smallest value where constraint_f succeeds."""
    result = find_split(b, None, a, None, constraint_f, toler)
    return result
def find_split(a, qa, b, qb, constraint_f, toler):
#pdb.set_trace()
print 'a', a, qa, 'b', b, qb
#assume we're maximizing (or going towards b)
if abs(b - a) < toler:
if qb == True:
return b
elif qa == True:
return a
else:
raise RuntimeError('min interval reached without finding a point that returns success')
else:
nqb = constraint_f(b)
if nqb:
return b
else:
mid = (a + b) / 2.0
nmid = constraint_f(mid)
if not nmid:
return find_split(a, qa, mid, nmid, constraint_f, toler)
else:
return find_split(mid, True, b, nqb, constraint_f, toler)
def my_func(input):
    """Example constraint: succeed for values strictly greater than 5.5."""
    return input > 5.5
#print 'returned', find_split(-10.0, None, 20, None, my_func, .2)
# Demo: bisect downward from 20 toward -10 for the lowest passing value.
print 'returned', find_split(20.0, None, -10., None, my_func, .1)
| [
[
1,
0,
0.0238,
0.0238,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0476,
0.0238,
0,
0.66,
0.1667,
91,
0,
1,
0,
0,
91,
0,
0
],
[
2,
0,
0.1071,
0.0476,
0,
0.... | [
"import numpy as np",
"import pdb",
"def max_wrt_constrain(a, b, constraint_f, toler):\n return find_split(a, None, b, None, constraint_f, toler)",
" return find_split(a, None, b, None, constraint_f, toler)",
"def min_wrt_constrain(a, b, constraint_f, toler):\n return find_split(b, None, a, None... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import pr2_msgs.msg as pm
import time
import hrl_lib.util as hru
class DataAccumulate:
    """Collect messages from a topic until it has been quiet for 2 seconds."""
    def __init__(self, topic, type):
        # Initialize state BEFORE subscribing: the callback can fire as soon
        # as the Subscriber exists, and it touches self.data and self.t.
        self.data = []
        self.headers = []
        self.t = None
        rospy.Subscriber(topic, type, self.callback)
    def callback(self, msg):
        # Convert the ROS time to a plain float so the message pickles cleanly.
        msg.header.stamp = msg.header.stamp.to_time()
        self.data.append(msg)
        self.t = time.time()
    def done(self):
        """Return True once more than 2 s have passed since the last message."""
        if self.t is None:
            return False
        return (time.time() - self.t) > 2.
if __name__ == '__main__':
    # Record left-gripper pressure messages until the topic goes quiet, then
    # pickle them to the path given on the command line.
    import sys
    rospy.init_node('test01')
    d = DataAccumulate('/pressure/l_gripper_motor', pm.PressureState)
    r = rospy.Rate(10)
    while not rospy.is_shutdown():
        # Sleep at the loop rate: the original never called r.sleep(), so the
        # loop busy-waited and pegged a CPU core.
        r.sleep()
        if d.done():
            break
    print('saved to %s' % sys.argv[1])
    hru.save_pickle(d.data, sys.argv[1])
#print len(d.data)
#hru.save_pickle(d.data[0]['stamp'], sys.argv[1])
#hru.save_pickle(d.data[0]['frame_id'], sys.argv[1])
#hru.save_pickle(d.data[0]['l_finger_tip'], sys.argv[1])
#hru.save_pickle(d.data[0]['r_finger_tip'], sys.argv[1])
#hru.save_pickle(d.headers, 'headers.pkl')
#self.data.app
#self.data.append({"stamp": msg.header.stamp.to_time(),
# "frame_id": msg.header.frame_id,
# "l_finger_tip": msg.l_finger_tip,
# "r_finger_tip": msg.r_finger_tip})
#self.headers.append(msg.header)
| [
[
1,
0,
0.0106,
0.0106,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0106,
0.0106,
0,
0.66,
0.1429,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0213,
0.0106,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import pr2_msgs.msg as pm",
"import time",
"import hrl_lib.util as hru",
"class DataAccumulate:\n def __init__(self, topic, type):\n rospy.Subscriber(topic, type, self.c... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_lib.util as ut
import hai_sandbox.pr2 as pr2
import numpy as np
import time
import pdb
import sys
def dict_to_ps(d):
    """Build a geometry_msgs PoseStamped from a nested dict (as saved by the recorder).

    @param d dict with ['pose']['position'|'orientation'] and ['header'] entries
    @return gm.PoseStamped
    """
    # 'gm' is never imported at the top of this script, so the original raised
    # NameError on first call; import it locally here.
    import geometry_msgs.msg as gm
    ps = gm.PoseStamped()
    ps.pose.position.x = d['pose']['position']['x']
    ps.pose.position.y = d['pose']['position']['y']
    ps.pose.position.z = d['pose']['position']['z']
    ps.pose.orientation.x = d['pose']['orientation']['x']
    ps.pose.orientation.y = d['pose']['orientation']['y']
    ps.pose.orientation.z = d['pose']['orientation']['z']
    ps.pose.orientation.w = d['pose']['orientation']['w']
    ps.header.frame_id = d['header']['frame_id']
    ps.header.stamp = d['header']['stamp']
    return ps
def imitate(data_fname):
    """Replay a recorded demonstration: drive to the recorded base pose, move
    arms/head/torso to the recorded start configuration, then play back each
    recorded movement segment on the arms.

    @param data_fname pickle with keys 'base_pose', 'robot_pose', 'arm',
           'movement_states'
    """
    # data = {'base_pose': pose_base, 
    #         'robot_pose': j0_dict,
    #         'arm': arm_used,
    #         'movement_states': None}
    data = ut.load_pickle(data_fname)
    rospy.init_node('imitate')
    robot = pr2.PR2()
    state = 'drive'

    ##Need to be localized!!
    ## NOT LEARNED: go into safe state.

    ## drive. learned locations. (might learn path/driving too?)
    if state == 'drive':
        t, r = data['base_pose']
        print(t)
        r = robot.base.set_pose(t, r, '/map', block=True)
        rospy.loginfo('result is %s' % str(r))
        state = 'init_manipulation'
    ## Need a refinement step

    ## Move joints to initial state. learned initial state. (maybe coordinate this with sensors?)
    #Put robot in the correct state
    if state == 'init_manipulation':
        rospy.loginfo('STATE init_manipulation')
        j0_dict = data['robot_pose']
        cpos = robot.pose()
        robot.left_arm.set_poses (np.column_stack([cpos['larm'], j0_dict['poses']['larm']]), np.array([0.1, 5.]), block=False)
        robot.right_arm.set_poses(np.column_stack([cpos['rarm'], j0_dict['poses']['rarm']]), np.array([0.1, 5.]), block=False)
        robot.head.set_poses(np.column_stack([cpos['head_traj'], j0_dict['poses']['head_traj']]), np.array([.01, 5.]))
        robot.torso.set_pose(j0_dict['poses']['torso'][0,0], block=True)
        state = 'manipulate'

    if state == 'manipulate_cart':
        rospy.loginfo('STATE manipulate')
        rospy.loginfo('there are %d states' % len(data['movement_states']))
        ## For each contact state.  Note: the loop variable must NOT be named
        ## 'state' — that would clobber the dispatch string tested below.
        for idx in range(len(data['movement_states'])):
            cur_state = data['movement_states'][idx]
            rospy.loginfo("starting %s" % cur_state['name'])
            left_cart  = cur_state['cartesian'][0]
            right_cart = cur_state['cartesian'][1]
            start_time = cur_state['start_time']
            for ldict, rdict in zip(left_cart, right_cart):
                lps = dict_to_ps(ldict)
                rps = dict_to_ps(rdict)
                time_from_start = ((lps.header.stamp - start_time) + (rps.header.stamp - start_time))/2.0
                cur_time = rospy.get_rostime()
                ntime = cur_time + rospy.Duration(time_from_start)

                diff_time = (ntime - rospy.get_rostime()).to_sec()
                if diff_time < 0:
                    # BUGFIX: rospy has no 'logerror'; the correct call is logerr.
                    rospy.logerr('DIFF time < 0, %f' % diff_time)
                time.sleep(diff_time - .005)
                #Publish...

    if state == 'manipulate':
        rospy.loginfo('STATE manipulate')
        rospy.loginfo('there are %d states' % len(data['movement_states']))
        ## For each contact state
        for idx in range(len(data['movement_states'])):
            cur_state = data['movement_states'][idx]
            rospy.loginfo("starting %s" % cur_state['name'])

            larm, lvel, ltime, rarm, rvel, rtime = zip(*[[jdict['poses']['larm'], jdict['vels']['larm'], jdict['time'], \
                                                          jdict['poses']['rarm'], jdict['vels']['rarm'], jdict['time']] \
                                                                for jdict in cur_state['joint_states']])

            larm = np.column_stack(larm)
            rarm = np.column_stack(rarm)
            lvel = np.column_stack(lvel)
            rvel = np.column_stack(rvel)
            # Segment-relative times for the trajectory controller.
            ltime = np.array(ltime) - cur_state['start_time']
            rtime = np.array(rtime) - cur_state['start_time']

            ## send trajectory. wait until contact state changes or traj. finished executing.
            # Move to the segment's first configuration, then stream the rest.
            robot.left_arm.set_poses(larm[:,0], np.array([2.]), block=False)
            robot.right_arm.set_poses(rarm[:,0], np.array([2.]), block=True)

            robot.left_arm.set_poses(larm, ltime, vel_mat=lvel, block=False)
            robot.right_arm.set_poses(rarm, rtime, vel_mat=rvel, block=True)

            rospy.loginfo("%s FINISHED" % cur_state['name'])
            time.sleep(5)

    ## rosbag implementation steps in time and also figures out how long to sleep until it needs to publish next message
    ## Just play pose stamped back at 10 hz
    ## For each contact state
## rosbag implementation steps in time and also figures out how long to sleep until it needs to publish next message
## Just play pose stamped back at 10 hz
## For each contact state
class ControllerTest:
    # Switches the PR2 arms to Cartesian controllers, holds an elbow-up
    # posture until shutdown, then restores the joint controllers.
    def __init__(self):
        pass

    def run(self):
        self.robot = pr2.PR2()
        rospy.loginfo('switching to cartesian controllers')
        self.robot.controller_manager.switch(['l_cart', 'r_cart'], ['l_arm_controller', 'r_arm_controller'])
        # Registered after the switch so the joint controllers are restored
        # even on Ctrl-C.
        rospy.on_shutdown(self.shutdown)
        r = rospy.Rate(1)
        #publish posture & cartesian poses
        while not rospy.is_shutdown():
            self.robot.left_arm.set_posture(self.robot.left_arm.POSTURES['elbowupl'])
            self.robot.right_arm.set_posture(self.robot.right_arm.POSTURES['elbowupr'])
            r.sleep()

    def shutdown(self):
        rospy.loginfo('switching back joint controllers')
        self.robot.controller_manager.switch(['l_arm_controller', 'r_arm_controller'], ['l_cart', 'r_cart'])
if __name__ == '__main__':
    # Replay the demonstration pickle given on the command line; the
    # ControllerTest path is intentionally disabled.
    imitate(sys.argv[1])
    if False:
        c = ControllerTest()
        c.run()
| [
[
1,
0,
0.0066,
0.0066,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0066,
0.0066,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0132,
0.0066,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hrl_lib.util as ut",
"import hai_sandbox.pr2 as pr2",
"import numpy as np",
"import time",
"import pdb",
"import sys",
"def dict_to_ps(d):\n ps = gm.PoseStamped()\... |
import roslib; roslib.load_manifest('hai_sandbox')
import cv
import hai_sandbox.features as fea
import hrl_camera.ros_camera as rc
import rospy
# Script body: show live SURF features detected on the wide-stereo camera stream.
#prosilica = rc.Prosilica('prosilica', 'streaming')
#prosilica = rc.ROSCamera('/narrow_stereo/right/image_rect')
prosilica = rc.ROSCamera('/wide_stereo/right/image_rect_color')
cv.NamedWindow('surf', 1)
while not rospy.is_shutdown():
    f = prosilica.get_frame()
    # NOTE(review): params presumably (extended, hessianThreshold, nOctaves,
    # nOctaveLayers) — confirm against fea.surf_color.
    loc, desc = fea.surf_color(f, params=(0, 3000, 3, 4))
    fdrawn = fea.draw_surf(f, loc, (0, 255, 0))
    cv.ShowImage('surf', fdrawn)
    # ~30 fps display; WaitKey also services the window's event loop.
    cv.WaitKey(33)
| [
[
1,
0,
0.0588,
0.0588,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0588,
0.0588,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1176,
0.0588,
0,
0.6... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import cv",
"import hai_sandbox.features as fea",
"import hrl_camera.ros_camera as rc",
"import rospy",
"prosilica = rc.ROSCamera('/wide_stereo/right/image_rect_color')",
"cv.NamedWindow('surf'... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import sensor_msgs.msg as sm
import hrl_lib.rutils as ru
import numpy as np
import pr2_msgs.msg as pm
import tf
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import time
import hrl_lib.util as ut
def pointcloud_to_np(pc):
    """Convert a sensor_msgs PointCloud into a 3xN numpy matrix."""
    coords = [[pt.x, pt.y, pt.z] for pt in pc.points]
    return np.matrix(coords).T
class ContactTipLocation:
    """Record fingertip locations (looked up via TF) whenever the gripper
    pressure sensors report a touch."""
    def __init__(self):
        rospy.init_node('contact3d')
        rospy.Subscriber('/pressure/l_gripper_motor', pm.PressureState, self.lpress_cb)
        # BUGFIX: the right-hand callback previously subscribed to the LEFT
        # gripper topic; subscribe it to the right gripper instead.
        rospy.Subscriber('/pressure/r_gripper_motor', pm.PressureState, self.rpress_cb)
        self.ftip_frames = ['r_gripper_l_finger_tip_link',
                            'r_gripper_r_finger_tip_link',
                            'l_gripper_l_finger_tip_link',
                            'l_gripper_r_finger_tip_link']
        self.tflistener = tf.TransformListener()
        self.lmat0 = None          # baseline left/right fingertip readings
        self.rmat0 = None
        self.contact_locs = []     # [[time, [tip positions]], ...]
        self.last_msg = None       # wall-clock time of the last detected contact

    def lpress_cb(self, pmsg):
        #conv to mat
        lmat = np.matrix((pmsg.l_finger_tip)).T
        rmat = np.matrix((pmsg.r_finger_tip)).T
        if self.lmat0 is None:
            # The first message establishes the zero baseline.
            self.lmat0 = lmat
            self.rmat0 = rmat
            return

        #zero
        lmat = lmat - self.lmat0
        rmat = rmat - self.rmat0

        #touch detected
        if np.any(np.abs(lmat) > 250) or np.any(np.abs(rmat) > 250): #TODO: replace this with something more sound
            #Contact has been made!! look up gripper tip location
            to_frame = 'base_link'
            def frame_loc(from_frame):
                p_base = tfu.transform('base_footprint', from_frame, self.tflistener) \
                               * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
                return tfu.matrix_as_tf(p_base)
            # Position (index 0 of the tf tuple) of every fingertip frame.
            tip_locs = [frame_loc(n)[0] for n in self.ftip_frames]
            t = pmsg.header.stamp.to_time()
            rospy.loginfo("contact detected at %.3f" % t)
            self.contact_locs.append([t, tip_locs])
            self.last_msg = time.time()

        rospy.loginfo('lpress_cb ' + str(np.max(rmat)) + ' ' + str(np.max(lmat)))

    def rpress_cb(self, pmsesg):
        # Right gripper handling not implemented yet.
        pass
#contact_mat(pmesg)
if __name__ == '__main__':
    import sys
    import pdb
    fname = sys.argv[1]
    scene = None
    # Use offline bag files
    # Load the pointcloud messages
    # Find out the variance in # of points
    # Select one as canonical
    i = 0
    for top, pc, t in ru.bag_iter(fname, ['/full_cloud']):
        # Want first message of at least this size
        if len(pc.points) > 20000:
            if i > 0:
                pdb.set_trace()
            scene = pointcloud_to_np(pc)
            break
        i = i + 1
    # Run online to get gripper tip locations from TF
    # Subscribe to pressure readings, find contact times & get gripper tip locations
    ctl = ContactTipLocation()
    r = rospy.Rate(10)
    print 'running contact tip recorder'
    while not rospy.is_shutdown():
        r.sleep()
        # Stop once no contact has been seen for a full minute.
        if ctl.last_msg != None and (time.time() - ctl.last_msg) > 60.0:
            break
    print 'saving pickle contact_locs.pkl'
    ut.save_pickle([scene, ctl.contact_locs], 'contact_locs.pkl')
    # NOTE(review): breakpoint before exit — looks like leftover debugging.
    pdb.set_trace()
    print 'done.'
#class Contact3d:
# def __init__(self):
# rospy.init_node("contact_loc")
# rospy.Subscriber('full_cloud', sm.PointCloud, self.point_cloud_cb)
#
# def point_cloud_cb(self, pc_msg):
# if len(pc_msg.points) < 1:
# return
# pointcloud_to_np(pc_msg.points)
#c = Contact3d()
#r = rospy.Rate(10)
#print 'running'
#while not rospy.is_shutdown():
# r.sleep()
| [
[
1,
0,
0.0069,
0.0069,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0069,
0.0069,
0,
0.66,
0.0714,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0139,
0.0069,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import sensor_msgs.msg as sm",
"import hrl_lib.rutils as ru",
"import numpy as np",
"import pr2_msgs.msg as pm",
"import tf",
"import hrl_lib.tf_utils as tfu",
"import tf.tr... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_lib.util as ut
import sensor_msgs.msg as sm
import sys
class CamInfoCB:
    """Capture a single sensor_msgs CameraInfo message from 'topic'."""
    def __init__(self, topic):
        # Initialize msg BEFORE subscribing: the original assigned None after
        # creating the Subscriber, so an early callback could be overwritten.
        self.msg = None
        rospy.init_node('grab_cam_info')
        rospy.Subscriber(topic, sm.CameraInfo, self.cb)
    def cb(self, msg):
        # Keep the latest message; the main loop polls until it is non-None.
        self.msg = msg
# Usage: <script> <camera_info_topic> <output_pickle>
topic = sys.argv[1]
save_name = sys.argv[2]
c = CamInfoCB(topic)
r = rospy.Rate(10)
# Poll until the first CameraInfo message arrives, then pickle it.
while not rospy.is_shutdown() and c.msg == None:
    r.sleep()
ut.save_pickle(c.msg, save_name)
| [
[
1,
0,
0.0417,
0.0417,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0417,
0.0417,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0833,
0.0417,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hrl_lib.util as ut",
"import sensor_msgs.msg as sm",
"import sys",
"class CamInfoCB:\n def __init__(self, topic):\n rospy.init_node('grab_cam_info')\n rosp... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import kinematics_msgs.srv as ks
import hrl_lib.tf_utils as tfu
import tf
import hai_sandbox.pr2_kinematics as pr2k
import sensor_msgs.msg as sm
import hrl_lib.rutils as ru
import numpy as np
import hrl_lib.util as ut
def script():
    """Demo: query the right-arm FK service directly and print the gripper
    tool-frame pose for a fixed all-joints-at-.5-rad configuration."""
    rospy.init_node('forward_kin')
    tflistener = tf.TransformListener()
    # print() calls are portable across Python 2 and 3 (py2 print statements
    # are not).
    print('waiting for transform')
    tflistener.waitForTransform('r_gripper_tool_frame', 'r_wrist_roll_link', rospy.Time(), rospy.Duration(10))
    print('waiting for services')
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk_solver_info')#, ks.GetKinematicSolverInfo )
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk')#, ks.GetPositionFK)
    print('done init')
    r_fk_info = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk_solver_info', ks.GetKinematicSolverInfo )
    r_fk = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk', ks.GetPositionFK)
    resp = r_fk_info()
    print('get_fk_solver_info returned %s' % resp.kinematic_solver_info.joint_names)
    print('get_fk_solver_info returned %s' % resp.kinematic_solver_info.limits)
    print('get_fk_solver_info returned %s' % resp.kinematic_solver_info.link_names)

    fk_req = ks.GetPositionFKRequest()
    fk_req.header.frame_id = 'torso_lift_link'
    fk_req.fk_link_names = ['r_wrist_roll_link']
    fk_req.robot_state.joint_state.name = resp.kinematic_solver_info.joint_names
    # Arbitrary test configuration: every joint at .5 rad.
    fk_req.robot_state.joint_state.position = [.5 for i in range(len(resp.kinematic_solver_info.joint_names))]
    fk_resp = r_fk(fk_req)

    # FK returns the wrist pose; compose with the fixed wrist->tool transform
    # to obtain the gripper tool-frame pose.
    rtip_T_wr = tfu.transform('r_gripper_tool_frame', 'r_wrist_roll_link', tflistener)
    right_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
    rtip_pose = rtip_T_wr * right_wr
    print(tfu.matrix_as_tf(rtip_pose))
class TestForwardKin:
    """Subscribe to /joint_states, run right-arm FK on every message, and
    publish the resulting gripper tip position as a one-point PointCloud."""
    def __init__(self):
        # Lazily-built map from joint name to its index in /joint_states.
        self.name_dict = None
        self.msgs = []
        self.joint_idx = {}
        rospy.init_node('forward_kin')
        self.tflistener = tf.TransformListener()
        self.fkright = pr2k.PR2ArmKinematics('right', self.tflistener)
        self.point_cloud_pub = rospy.Publisher('right_fk', sm.PointCloud)
        rospy.Subscriber('/joint_states', sm.JointState, self.joint_state_cb)
        print 'done init'

    def joint_state_cb(self, msg):
        if self.name_dict == None:
            # First message: build the name->index map and load the joint
            # group lists saved by the param_saver script (link_names.pkl).
            self.name_dict = {}
            for i, n in enumerate(msg.name):
                self.name_dict[n] = i

            self.joint_groups = ut.load_pickle('link_names.pkl')
            for group in self.joint_groups.keys():
                self.joint_idx[group] = [self.name_dict[name] for name in self.joint_groups[group]]

        dmat = np.matrix(msg.position).T
        joint_angles = dmat[self.joint_idx['rarm'], 0].A1.tolist()
        #print len(joint_angles)
        #print dmat.shape, self.joint_idx['rarm']
        rtip_pose = self.fkright.fk('base_footprint', 'r_wrist_roll_link', 'r_gripper_tool_frame', joint_angles)
        # Translation column of the homogeneous transform.
        position = rtip_pose[0:3,3]
        #print position.T
        pc = ru.np_to_pointcloud(position, 'base_footprint')
        pc.header.stamp = rospy.get_rostime()
        self.point_cloud_pub.publish(pc)

    def run(self):
        # All work happens in the subscriber callback; just spin.
        r = rospy.Rate(10)
        while not rospy.is_shutdown():
            r.sleep()
def kin_class():
    # Demo of the PR2ArmKinematics wrapper: print the tool-frame pose of both
    # arms with every joint at .5 rad.
    tflistener = tf.TransformListener()
    right = pr2k.PR2ArmKinematics('right', tflistener)
    rtip_pose = right.fk('torso_lift_link', 'r_wrist_roll_link', 'r_gripper_tool_frame', [.5 for i in range(len(right.joint_names))])
    print tfu.matrix_as_tf(rtip_pose)
    left = pr2k.PR2ArmKinematics('left', tflistener)
    ltip_pose = left.fk('torso_lift_link', 'l_wrist_roll_link', 'l_gripper_tool_frame', [.5 for i in range(len(left.joint_names))])
    print tfu.matrix_as_tf(ltip_pose)
if __name__ == '__main__':
    #kin_class()
    # Continuously publish the FK-computed right gripper tip position.
    t = TestForwardKin()
    t.run()
| [
[
1,
0,
0.0085,
0.0085,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0085,
0.0085,
0,
0.66,
0.0714,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0169,
0.0085,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import kinematics_msgs.srv as ks",
"import hrl_lib.tf_utils as tfu",
"import tf",
"import hai_sandbox.pr2_kinematics as pr2k",
"import sensor_msgs.msg as sm",
"import hrl_lib.... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
rospy.init_node('param_saver')
import hrl_lib.util as ut
# Snapshot the joint-name lists of the main PR2 controllers so that offline
# tools can map /joint_states indices to joint groups.
joint_groups = {}
joint_groups['rarm'] = rospy.get_param('/r_arm_controller/joints')
joint_groups['larm'] = rospy.get_param('/l_arm_controller/joints')
joint_groups['head_traj'] = rospy.get_param('/head_traj_controller/joints')
joint_groups['torso'] = rospy.get_param('/torso_controller/joints')
ut.save_pickle(joint_groups, 'link_names.pkl')
| [
[
1,
0,
0.0909,
0.0909,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0909,
0.0909,
0,
0.66,
0.1,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1818,
0.0909,
0,
0.66,... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"rospy.init_node('param_saver')",
"import hrl_lib.util as ut",
"joint_groups = {}",
"joint_groups['rarm'] = rospy.get_param('/r_arm_controller/joints')",
"joint_groups['larm... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import actionlib
import hrl_lib.tf_utils as tfu
import hrl_lib.util as ut
import hrl_pr2_lib.devices as hd
import hrl_camera.ros_camera as rc
import hai_sandbox.recognize_3d as r3d
#import hai_sandbox.msg as hmsg
import geometry_msgs.msg as gmsg
import tf
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
import ml_lib.dataset as ds
import cv
import datetime
import hrl_lib.util as ut
import math
def str_from_time(ctime):
return datetime.datetime.fromtimestamp(ctime).strftime('%Y_%m_%d_%H_%M_%S')
class InterestPointPerception(r3d.InterestPointAppBase):
    """Active-learning perception app: scans the scene with the tilting laser
    and prosilica camera, extracts features at a 3D point of interest, and
    manages the labeled dataset / classifier inherited from the base class."""

    def __init__(self, object_name, labeled_data_fname, tf_listener):
        r3d.InterestPointAppBase.__init__(self, object_name, labeled_data_fname)

        self.tf_listener = tf_listener
        if tf_listener == None:
            self.tf_listener = tf.TransformListener()
        self.laser_scan = hd.LaserScanner('point_cloud_srv')
        self.prosilica = rc.Prosilica('prosilica', 'polled')
        self.prosilica_cal = rc.ROSCameraCalibration('/prosilica/camera_info')
        self.image_pub = r3d.ImagePublisher(object_name + '_perception', self.prosilica_cal)
        # Most recent example added via add_example ({'pt': 2d, 'l': label}).
        self.last_added = None
        self.disp = r3d.RvizDisplayThread(self.prosilica_cal)
        self.disp.start()

    def scan(self, point3d):
        """Take a laser scan + camera frame and build the feature extractor
        around the given 3D point."""
        rospy.loginfo('InterestPointPerception: scanning..')
        point_cloud_bl = self.laser_scan.scan(math.radians(180.), math.radians(-180.), 15)
        prosilica_image = self.prosilica.get_frame()
        image_T_laser = tfu.transform('/high_def_optical_frame', '/base_link', self.tf_listener)

        #Extract features
        self.feature_extractor = r3d.IntensityCloudData(point_cloud_bl, 
                prosilica_image, image_T_laser, self.prosilica_cal, 
                point3d, self.rec_params)
        fex = self.feature_extractor
        self.disp.display_scan(fex.point_cloud3d_orig, fex.points3d_valid_laser, fex.colors_valid,
                prosilica_image, self.prosilica_cal)
        rospy.loginfo('InterestPointPerception: extracting features..')
        #self.instances, self.points2d, self.points3d = self.feature_extractor.extract_vectorized_features()
        rospy.loginfo('InterestPointPerception: finished extraction.')

    def is_ready(self):
        # Ready once the base class has trained a learner.
        return self.learner != None

    def add_example(self, point3d_bl, label, pt2d=None):
        """Add a labeled 3D point to the dataset; returns False if no feature
        vector can be computed at that location."""
        fex = self.feature_extractor
        feature = fex.feature_vec_at_mat(point3d_bl)
        if feature == None:
            return False
        pt2d = fex.calibration_obj.project(tfu.transform_points(fex.image_T_laser, point3d_bl))
        label = np.matrix(label)
        self.add_to_dataset(feature, label, pt2d, point3d_bl)
        self.last_added = {'pt': pt2d, 'l': label}
        return True

    def select_next_instances(self, n):
        """Ask the active learner for the n most informative unlabeled instances."""
        #selected_idx, selected_dist = self.learner.select_next_instances(self.instances, n)
        return self.learner.select_next_instances(self.instances, n)

    def get_likely_success_points(self):
        """Return the 3D points classified as positive."""
        #do something to reduce number of points
        points3d_pos = self.classified_dataset.pt3d[:, np.where(self.classified_dataset.outputs == r3d.POSITIVE)[1].A1]
        return points3d_pos

    def draw_and_send(self):
        """Classify the current scan and publish an annotated camera image."""
        self.classify()
        img = cv.CloneMat(self.feature_extractor.image_cv)
        #draw classified points.
        colors = {r3d.POSITIVE: [0,255,0], r3d.NEGATIVE: [0,0,255]}
        r3d.draw_labeled_points(img, self.classified_dataset)
        #draw labeled data. 
        r3d.draw_labeled_points(img, self.dataset, colors[r3d.POSITIVE], colors[r3d.NEGATIVE])
        #draw latest addition and its label. 
        r3d.draw_points(img, self.last_added['pt'], colors[self.last_added['l']], 4)
        self.image_pub.publish(img, self.feature_extractor.calibration_obj)
if __name__ == '__main__':
    import sys
    object_name = sys.argv[1]
    datafile = sys.argv[2]
    # BUG FIX: the original instantiated InterestPointActions, which is not
    # defined anywhere in this module (immediate NameError).  The class
    # defined above with the matching (object_name, datafile) usage is
    # InterestPointPerception; pass None so it creates its own tf listener.
    server = InterestPointPerception(object_name, datafile, None)
    rospy.spin()
#def classify(self):
# r3d.InterestPointAppBase.classify(self)
# points3d_pos = self.classified_dataset.pt3d[:, np.where(self.classified_dataset.outputs == r3d.POSITIVE)[1].A1]
# return points3d_pos
#def add_to_dataset(self, feature, label, pt2d, pt3d):
# if self.dataset == None:
# self.dataset = InterestPointDataset(feature, label, [pt2d], [pt3d], self.feature_extractor)
# else:
# self.dataset.add(feature, label, pt2d, pt3d)
# #pos_ex = np.sum(self.dataset.outputs)
# #neg_ex = self.dataset.outputs.shape[1] - pos_ex
# #if pos_ex > 2 and neg_ex > 2 and self.blank:
# # self.train_learner()
# # self.blank = False
#def train(self):
# return None
#def train(self):
# if self.dataset != None:
# #train
# self.ipdetector.train(self.dataset)
# #classify
# #pdb.set_trace()
# results = []
# for i in range(self.instances.shape[1]):
# results.append(self.ipdetector.learner.classify(self.instances[:,i]))
# #pdb.set_trace()
# plist = [self.points2d[:, i] for i in range(self.points2d.shape[1])]
# p3list = [self.points3d[:, i] for i in range(self.points3d.shape[1])]
# self.classified_dataset = InterestPointDataset(self.instances, np.matrix(results),
# plist, p3list, self.feature_extractor)
#self.object_name = object_name
#self.ipdetector = r3d.InterestPointDetector()
#self.dataset = None
#self.labeled_data_fname = datafile
#if datafile != None:
# self.load_labeled_data()
#self.feature_extractor = None
#def load_labeled_data(self):
# self.dataset = ut.load_pickle(self.labeled_data_fname)
# print 'loaded from', self.labeled_data_fname
# self.dataset.pt2d = [None] * len(self.dataset.pt2d)
# self.dataset.pt3d = [None] * len(self.dataset.pt3d)
# self.ipdetector = InterestPointDetector(self.dataset)
# self.ipdetector.train(self.dataset)
#def add_to_dataset(self, feature, label, pt2d, pt3d):
# if self.dataset == None:
# self.dataset = InterestPointDataset(feature, label, [pt2d], [pt3d], self.feature_extractor)
# else:
# self.dataset.add(feature, label, pt2d, pt3d)
# pos_ex = np.sum(self.dataset.outputs)
# neg_ex = self.dataset.outputs.shape[1] - pos_ex
# if pos_ex > 2 and neg_ex > 2 and self.blank:
# self.train_learner()
# self.blank = False
# def __init__(self, object_name, datafile):
# self.node_name = object_name + '_active_perception_server'
# self.object_name = object_name
# rospy.init_node(self.node_name)
#
# #Load learner
# self.learner = r3d.SVMPCA_ActiveLearner()
# self.rec_params = r3d.Recognize3DParam()
#
# rospy.loginfo('Loading dataset: ' + datafile)
# labeled_light_switch_dataset = ut.load_pickle(datafile)
# rospy.loginfo('Training %s.' % object_name)
# self.learner.train(labeled_light_switch_dataset,
# labeled_light_switch_dataset.sizes['intensity'],
# self.rec_params.variance_keep)
#
# rospy.loginfo('Launching ROS connections')
#
# #Create message listeners
# self.tf_listener = tf.TransformListener()
# self.bridge = CvBridge()
#
# #Create servers
# self.find_as = actionlib.SimpleActionServer('find_' + object_name,
# hmsg.InterestPointLocate, self.find_cb, False)
# self.find_as.start()
# #self.find_as.is_preempt_requested()
# #self.find_as.set_preempted()
# #self.find_as.publish_feedback(self._feedback)
# #self.find_as.set_succeeded()
#
# self.add_example_as = actionlib.SimpleActionServer('add_example_' + object_name,
# hmsg.InterestPointAddExample, self.add_example_cb, False)
# self.add_example_as.start()
#
# self.pick_as = actionlib.SimpleActionServer('pick_' + object_name,
# hmsg.InterestPointPick, self.pick_cb, False)
# self.pick_as.start()
#
# rospy.loginfo('Ready.')
#
# #rospy.loginfo('Loading dataset: ' + datafile)
# #rospy.loginfo('Training %s.' % object_name)
# #rospy.loginfo('Launching ROS connections')
#
# def _find(self, cloud, image, calib, center, radius):
# #preprocess
# self.rec_params.radius = goal.radius
# image_T_laser = tfu.transform(calib.frame, cloud.header.frame_id, self.tf_listener)
# ic_data = r3d.IntensityCloudData(cloud, image, image_T_laser, calib, center, self.rec_params)
#
# #label
# instances = ic_data.extract_vectorized_features()
# results = []
# for i in range(instances.shape[1]):
# nlabel = self.learner.classify(instances[:, i])
# results.append(nlabel)
#
# results = np.matrix(results)
#
# #draw and save results
# image_cpy = cv.CloneImage(image)
# r3d.draw_labeled_points(ic_data, ds.Dataset(self.instances, results), image_cpy)
# cv.SaveImage('%s_%s.png' % (self.object_name, str_from_time(cloud.header.stamp.to_time())), image_cpy)
#
# #want 3d location of each instance
# positive_indices = np.where(results == r3d.POSITIVE)[1]
# positive_points_3d = ic_data.sampled_points[:, positive_indices]
#
# #return a random point for now
# rindex = np.random.randint(0, len(positive_indices))
#
# return positive_points_3d[:,rindex]
#
#
# def find_cb(self, goal):
# calib = rc.ROSCameraCalibration(offline=True)
# calib.camera_info(goal.camera_info)
# imagecv = self.bridge.imgmsg_to_cv(goal.image, encoding="bgr8")
# centermat = np.matrix([goal.center.x, goal.center.y, goal.center.z]).T
# round_points = self._find(goal.cloud, imagecv, centermat, goal_radius)
#
# results = hmsg.InterestPointLocateResult()
# results.header.frame_id = goal.cloud.header.frame_id
# results.header.stamp = rospy.Time.now()
# results.interest_points = [gmsg.Point(*round_point[:,i].T.A1.tolist()) for i in range(round_points.shape[1])]
# self.find_as.set_succeeded()
#
# def add_example_cb(self, goal):
# pass
#
# def pick_cb(self, goal):
# pass
| [
[
1,
0,
0.0035,
0.0035,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0035,
0.0035,
0,
0.66,
0.05,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0071,
0.0035,
0,
0.66... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import actionlib",
"import hrl_lib.tf_utils as tfu",
"import hrl_lib.util as ut",
"import hrl_pr2_lib.devices as hd",
"import hrl_camera.ros_camera as rc",
"import hai_sandbox... |
import numpy as np
# Hard-coded capture timestamps (seconds since epoch) of camera frames,
# presumably used to align camera frames against the single FPFH snapshot
# time below -- TODO confirm against the consumer of these constants.
camera_t = np.matrix([1299102299.98, 1299102300.04, 1299102300.09, 1299102300.11, 1299102300.16, 1299102300.21, 1299102300.26, 1299102300.28, 1299102300.33, 1299102300.38, 1299102300.43, 1299102300.45, 1299102300.5, 1299102300.55, 1299102300.6, 1299102300.63, 1299102300.73, 1299102300.79, 1299102300.81, 1299102300.86, 1299102300.92, 1299102300.98, 1299102301.0, 1299102301.06, 1299102301.12, 1299102301.18, 1299102301.2, 1299102301.26, 1299102301.34, 1299102301.42, 1299102301.43, 1299102301.51, 1299102301.58, 1299102301.64, 1299102301.67, 1299102301.73, 1299102301.79, 1299102301.83, 1299102301.86, 1299102301.93, 1299102301.98, 1299102302.02, 1299102302.05, 1299102302.09, 1299102302.13, 1299102302.17, 1299102302.21, 1299102302.27, 1299102302.31, 1299102302.34, 1299102302.38, 1299102302.43, 1299102302.46, 1299102302.48, 1299102302.51, 1299102302.57, 1299102302.6])
# Timestamp of the FPFH point-cloud capture.
fpfh_t = 1299102299.87
| [
[
1,
0,
0.3333,
0.3333,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
14,
0,
0.6667,
0.3333,
0,
0.66,
0.5,
509,
3,
1,
0,
0,
162,
10,
1
],
[
14,
0,
1,
0.3333,
0,
0.66,... | [
"import numpy as np",
"camera_t = np.matrix([1299102299.98, 1299102300.04, 1299102300.09, 1299102300.11, 1299102300.16, 1299102300.21, 1299102300.26, 1299102300.28, 1299102300.33, 1299102300.38, 1299102300.43, 1299102300.45, 1299102300.5, 1299102300.55, 1299102300.6, 1299102300.63, 1299102300.73, 1299102300.79, 1... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hrl_camera.ros_camera as rc
import cv
import hai_sandbox.features as fea
import sys
import threading as thr
import scipy.spatial as sp
import numpy as np
import pdb
class ShowImage(thr.Thread):
    """Background thread that keeps displaying ``self.image`` in a HighGUI
    window until ROS shuts down.

    Producers set ``show.image`` from any thread; the run loop redraws it on
    the next iteration (~30 Hz via the 33 ms WaitKey).
    """
    def __init__(self, name):
        thr.Thread.__init__(self)
        self.image = None  # latest frame to display; None until a producer sets it
        self.name = name   # HighGUI window name (also becomes the thread name)
    def run(self):
        # WaitKey is required for HighGUI to process its event queue/repaint.
        while not rospy.is_shutdown():
            # Idiom fix: identity test against the None sentinel instead of
            # '!= None' equality (PEP 8; robust for array-like images).
            if self.image is not None:
                cv.ShowImage(self.name, self.image)
            cv.WaitKey(33)
def concat_images(a, b):
    # Place images a and b side by side on one canvas
    # (width = a.width + b.width, height = max of the two heights).
    img_height = max(a.height, b.height)
    c = cv.CreateImage((a.width+b.width, img_height), a.depth, a.channels)
    a_area = cv.GetSubRect(c, (0,0, a.width, a.height))
    b_area = cv.GetSubRect(c, (a.width, 0, b.width, b.height))
    # NOTE(review): cv.CreateImage does not guarantee zeroed pixels, and
    # cv.Add sums the source with whatever is already in the subrect;
    # cv.Copy(a, a_area) would be the unambiguous blit -- confirm intent.
    cv.Add(a, a_area, a_area)
    cv.Add(b, b_area, b_area)
    return c
#class SURFMatcher:
# def __init__(self):
# self.model_images = {}
# self.model_fea = {}
#
# def add_file(self, model_name, label):
# model_img = cv.LoadImage(model_name)
# self.add_model(model_img, label)
#
# def add_model(self, model_img, label):
# mgray = fea.grayscale(model_img)
# m_loc, m_desc = fea.surf(mgray)
# self.model_images[label] = model_img
# self.model_fea[label] = {'loc': m_loc, 'desc': m_desc}
#
# def build_db(self):
# fea_l = []
# labels_l = []
# locs_l = []
# for k in self.model_fea:
# fea_l.append(np.array(self.model_fea[k]['desc']))
# locs_l.append(np.array(self.model_fea[k]['loc']))
# labels_l.append(np.array([k for i in range(len(self.model_fea[k]['desc']))]))
#
# self.labels = np.row_stack(labels_l)
# self.locs = np.row_stack(locs_l)
# self.tree = sp.KDTree(np.row_stack(fea_l))
#
# def match(self, desc, thres=.6):
# dists, idxs = self.tree.query(np.array(desc), 2)
# ratio = dists[0] / dists[1]
# if ratio < threshold:
# desc = self.tree.data[idxs[0]]
# loc = self.locs[idxs[0]]
# return desc, loc
# else:
# return None
def match_images(model_img, cand_img, threshold=.8):
    """Match SURF descriptors between model_img and cand_img and return a
    side-by-side visualization with green lines joining matched keypoints.

    threshold -- Lowe ratio-test cutoff (nearest / second-nearest distance).
    """
    #pdb.set_trace()
    mgray = fea.grayscale(model_img)
    cgray = fea.grayscale(cand_img)
    m_loc, m_desc = fea.surf(mgray)
    dirs = [direction for loc, lap, size, direction, hess in m_loc]
    print 'max min dirs', np.min(dirs), np.max(dirs)
    c_loc, c_desc = fea.surf(cgray)
    # KD-tree over model descriptors for nearest-neighbor lookup.
    features_db = sp.KDTree(np.array(m_desc))
    matched = []
    for i, desc in enumerate(c_desc):
        # Query the two nearest model descriptors; accept per Lowe's ratio test.
        dists, idxs = features_db.query(np.array(desc), 2)
        ratio = dists[0] / dists[1]
        #print "%d %.4f" % (i, ratio),
        if ratio < threshold:
            matched.append((i, idxs[0]))
        #print 'matched!', idxs[0]
        #else:
        #    print 'X|'
    # Shift candidate keypoints right by the model image's width so they can
    # be drawn on the concatenated canvas.
    c_loc_moved = []
    for loc, lap, size, d, hess in c_loc:
        x, y = loc
        nloc = (x + model_img.width, y)
        c_loc_moved.append((nloc, lap, size, d, hess))
    c_loc_matched, m_loc_matched = zip(*[[c_loc_moved[i], m_loc[j]] for i, j in matched])
    joint = concat_images(model_img, cand_img)
    joint_viz = joint
    #joint_viz = fea.draw_surf(joint, c_loc_moved, (255,0,0))
    #joint_viz = fea.draw_surf(joint_viz, c_loc_matched, (0,255,0))
    #joint_viz = fea.draw_surf(joint_viz, m_loc, (255,0,0))
    #joint_viz = fea.draw_surf(joint_viz, m_loc_matched, (0,255,0))
    # Draw a line from each matched candidate keypoint to its model keypoint.
    for cloc, mloc in zip(c_loc_matched, m_loc_matched):
        cloc2d, _, _, _, _ = cloc
        mloc2d, _, _, _, _ = mloc
        cv.Line(joint_viz, cloc2d, mloc2d, (0,255,0), 1, cv.CV_AA)
    print '%d matches found' % len(matched)
    return joint_viz
def test_thresholds():
    # Visual sweep of SURF ratio-test thresholds (.8 down to .4) between the
    # two image files given on the command line; opens one window per value
    # and then blocks until ROS shutdown.
    model_name = sys.argv[1]
    candidate = sys.argv[2]
    model_img = cv.LoadImage(model_name)
    cand_img = cv.LoadImage(candidate)
    for i in range(5):
        thres = .8 - (i * .1)
        print 'thres %.2f' % thres
        joint_viz = match_images(model_img, cand_img, thres)
        win_name = 'surf%.2f' % thres
        cv.NamedWindow(win_name, 0)
        cv.ShowImage(win_name, joint_viz)
    # Keep pumping the HighGUI event loop so the windows stay responsive.
    while not rospy.is_shutdown():
        cv.WaitKey(10)
###############
##############
if __name__ == '__main__':
    # 'image': offline threshold sweep over two files;
    # 'live': match a model image against a streaming Prosilica camera.
    mode = 'image'
    #if mode = 'image':
    # find pose of model
    # find normal of model
    # record angles of features.
    if mode=='image':
        test_thresholds()
    if mode=='live':
        model_name = sys.argv[1]
        model_img = cv.LoadImage(model_name)
        model_gray = fea.grayscale(model_img)
        msurf_loc, msurf_desc = fea.surf(model_gray)
        prosilica = rc.Prosilica('prosilica', 'streaming')
        cv.NamedWindow('surf', 1)
        si = ShowImage('surf')
        si.start()
        #Each feature is a row in db
        features_db = sp.KDTree(np.array(msurf_desc))
        #pdb.set_trace()
        while not rospy.is_shutdown():
            print '..'
            image = prosilica.get_frame()
            print 'saving image'
            cv.SaveImage('frame.png', image)
            print '>'
            img_gray = fea.grayscale(image)
            locs, descs = fea.surf(img_gray)
            match_idxs = []
            for i, desc in enumerate(descs):
                # Lowe ratio test with a fixed, aggressive cutoff.
                dists, idxs = features_db.query(np.array(desc), 2)
                ratio = dists[0] / dists[1]
                if ratio < .49:
                    match_idxs.append(i)
            # All keypoints in red, matched ones re-drawn in blue.
            img_viz = fea.draw_surf(image, locs, (255,0,0))
            img_viz = fea.draw_surf(img_viz, [locs[i] for i in match_idxs], (0,0,255))
            si.image = img_viz
            print '%d matches found' % len(match_idxs)
            #print len(desc), desc.__class__, len(descs[0]), descs[0].__class__
            #si.image = image
| [
[
1,
0,
0.005,
0.005,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.005,
0.005,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0101,
0.005,
0,
0.66,
... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hrl_camera.ros_camera as rc",
"import cv",
"import hai_sandbox.features as fea",
"import sys",
"import threading as thr",
"import scipy.spatial as sp",
"import numpy ... |
import csv
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import rospy
import cv
import sys
import hrl_lib.rutils as ru
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import tf
import hrl_camera.ros_camera as cam
from sensor_msgs.msg import CameraInfo
import numpy as np
import hai_sandbox.features as fea
import os.path as pt
import hrl_lib.util as ut
import itertools as it
#Load original pickle
# Script: rewrite the time field of each record in a SURF-features pickle
# with the corresponding bag message's header stamp.
# argv[1] = bag file, argv[2] = surf pickle (overwritten in place).
orig_bag = sys.argv[1]
topic = '/l_forearm_cam/image_rect_color'
#Load surf pickle
print 'loading pickle', sys.argv[2]
surf_pkl = ut.load_pickle(sys.argv[2])
new_surf_data = []
print 'replacing time field'
# Walk the bag and the pickle in lockstep -- assumes one pickle record per
# bag message, in the same order (TODO confirm).
for tmt, surf_record in it.izip(ru.bag_iter(orig_bag, [topic]), surf_pkl) :
    topic, msg, t = tmt
    surf_t, surf_data = surf_record
    new_surf_data.append((msg.header.stamp.to_time(), surf_data))
print 'saving pickle with new time', sys.argv[2]
ut.save_pickle(new_surf_data, sys.argv[2])
| [
[
1,
0,
0.027,
0.027,
0,
0.66,
0,
312,
0,
1,
0,
0,
312,
0,
0
],
[
1,
0,
0.0541,
0.027,
0,
0.66,
0.0385,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0541,
0.027,
0,
0.66... | [
"import csv",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"from cv_bridge.cv_bridge import CvBridge, CvBridgeError",
"import rospy",
"import cv",
"import sys",
"import hrl_lib.rutils as ru",
"import hrl_lib.tf_utils as tfu",
"import t... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import cv
import numpy as np
import feature_extractor_fpfh.srv as fsrv
import hrl_lib.image3d as i3d
import hrl_lib.rutils as ru
import hrl_lib.prob as pr
import hrl_lib.tf_utils as tfu
import pdb
##
# Generalized from Probabilistic robotics for N != weights.shape[0]
def sample_points(weights, N):
assert(weights.shape[0] >= N)
M = weights.shape[0]
weights = weights / np.sum(weights)
r = np.random.rand() * (1.0/M)
c = weights[0,0]
i = 0
Xt = []
indices = np.sort(np.random.permutation(np.arange(1, M+1))[0:N]).tolist()
for m in indices:
U = r + (m - 1) * (1.0/M)
while U > c:
i = i + 1
c = c + weights[i,0]
Xt.append(i)
return Xt
def test_sample_points():
    """Sanity check: empirical sample frequencies should approach the weights."""
    w = np.matrix([.1, .4, .5]).T
    count = [0., 0., 0.]
    # BUG FIX: the original passed 6000. (a float) to range(), which is
    # rejected on modern Pythons; range requires an integer.
    for i in range(6000):
        idx = sample_points(w, 2)
        for x in idx:
            count[x] += 1
    print(np.matrix(count) / np.sum(count))
def intensity_pyramid_feature(point2d_image, np_image_arr, win_size, multipliers, flatten=True):
    """Extract a multi-scale local intensity window around a 2D image point.

    point2d_image -- pixel coordinate of the window center.
    np_image_arr  -- image as a numpy array.
    win_size      -- base window size; larger scales are resized back to it.
    multipliers   -- iterable of scale multipliers (1 means no resize).
    flatten       -- if True return one stacked column vector, else the list
                     of per-scale windows.

    Returns None when any scale's window falls outside the image.
    """
    local_intensity = []
    for multiplier in multipliers:
        if multiplier == 1:
            features = i3d.local_window(point2d_image, np_image_arr, win_size, flatten=flatten)
        else:
            features = i3d.local_window(point2d_image, np_image_arr, win_size*multiplier,
                    resize_to=win_size, flatten=flatten)
        # BUG FIX: 'features == None' compares elementwise on numpy arrays
        # (ambiguous truth value); identity test is the correct sentinel check.
        if features is None:
            return None
        local_intensity.append(features)
    if flatten:
        return np.row_stack(local_intensity)
    return local_intensity
class Subsampler:
    """Thin client for the 'subsample' ROS service (feature_extractor_fpfh):
    send a point cloud, get back the subsampled cloud as a numpy matrix."""
    def __init__(self):
        self.proxy = rospy.ServiceProxy('subsample', fsrv.SubsampleCalc)
    def subsample(self, points3d, frame='base_link'):
        # points3d: 3xn matrix expressed in `frame`.
        req = fsrv.SubsampleCalcRequest()
        req.input = ru.np_to_pointcloud(points3d, frame)
        res = self.proxy(req)
        return ru.pointcloud_to_np(res.output)
class IntensityCloudFeatureExtractor:
    """Sample 3D points near an expected location and extract, per sample,
    distance features (to given landmark points) plus a multi-scale intensity
    window around the sample's image projection.

    pointcloud_bl           -- 3xn cloud in the base_link frame.
    cvimage_mat             -- camera image (cv matrix).
    expected_loc_bl         -- 3x1 expected object location (base_link).
    distance_feature_points -- 3xk landmark points, or None to skip distances.
    image_T_bl              -- transform from base_link into the camera frame.
    camera_calibration      -- calibration object with .project/.w/.h.
    params                  -- parameter bag (uncertainty_{x,y,z}, n_samples,
                               win_size, win_multipliers).
    """
    def __init__(self, pointcloud_bl, cvimage_mat, expected_loc_bl, distance_feature_points,
                image_T_bl, camera_calibration, params):
        self.pointcloud_bl = pointcloud_bl
        self.cvimage_mat = cvimage_mat
        self.expected_loc_bl = expected_loc_bl
        self.distance_feature_points = distance_feature_points
        self.image_T_bl = image_T_bl
        self.camera_calibration = camera_calibration
        self.params = params
        self.subsampler_service = Subsampler()
        self.sizes = None #Important but access should be limited to decouple code

    def get_sizes(self):
        """Return {'distance': ..., 'intensity': ...} feature-section sizes
        (None until the first successful feature extraction)."""
        return self.sizes

    def _subsample(self):
        # Reduce the cloud through the PCL subsampling service.
        rospy.loginfo('Subsampling using PCL')
        rospy.loginfo('before %s' % str(self.pointcloud_bl.shape))
        self.pc_sub_samp_bl = self.subsampler_service.subsample(self.pointcloud_bl)
        rospy.loginfo('after %s' % str(self.pc_sub_samp_bl.shape))

    def _sample_points(self):
        """Draw up to params.n_samples unique cloud points, weighted by a
        Gaussian centered on expected_loc_bl, keeping only points whose image
        projection is in bounds.  Returns (3xm base_link points, 2xm pixels)."""
        rospy.loginfo('Sampling points')
        # Evaluate the Gaussian pdf at every subsampled point.
        gaussian = pr.Gaussian(self.expected_loc_bl,
                np.matrix([[self.params.uncertainty_x**2, 0, 0],
                           [0, self.params.uncertainty_y**2, 0],
                           [0, 0, self.params.uncertainty_z**2]]))
        pdf = gaussian.pdf_mat()
        probs = np.matrix(pdf(self.pc_sub_samp_bl))
        # Sample unique point indices.
        n_samples = min(self.params.n_samples, self.pc_sub_samp_bl.shape[1])
        pt_indices = list(set(sample_points(probs.T, n_samples)))
        sampled_pts3d_bl = self.pc_sub_samp_bl[:, pt_indices]
        sampled_pts3d_image = tfu.transform_points(self.image_T_bl, sampled_pts3d_bl)
        sampled_pts2d = self.camera_calibration.project(sampled_pts3d_image)
        sampled_pix2d = np.matrix(np.round(sampled_pts2d))
        # Throw away points that project outside the image.  BUG FIX: the
        # original OR'ed the four bound conditions ((x>=0)+(x<w)+...), which
        # is true for every point, so nothing was ever discarded; all four
        # bounds must hold simultaneously (elementwise AND).
        x = sampled_pix2d[0, :]
        y = sampled_pix2d[1, :]
        in_bounds = np.multiply(np.multiply(x >= 0, x < self.camera_calibration.w),
                                np.multiply(y >= 0, y < self.camera_calibration.h))
        good_pts = np.where(in_bounds)[1].A1
        sampled_pts3d_bl = sampled_pts3d_bl[:, good_pts]
        sampled_pix2d = sampled_pix2d[:, good_pts]
        rospy.loginfo('got %s good points' % str(sampled_pix2d.shape[1]))
        return sampled_pts3d_bl, sampled_pix2d

    def feature_vec_at(self, point3d_bl, point2d_image):
        """Return the feature sections [distance?, intensity] for one sample,
        or None when the intensity window falls outside the image."""
        fea_calculated = []
        # Synthetic distance features: Euclidean distance to each landmark.
        distance_feas = None
        # 'is (not) None' throughout: '== None' is elementwise on arrays.
        if self.distance_feature_points is not None:
            distance_feas = np.power(np.sum(np.power(self.distance_feature_points - point3d_bl, 2), 0), .5).T
            fea_calculated.append(distance_feas)
        # Multi-scale intensity window around the projection.
        intensity = intensity_pyramid_feature(point2d_image, np.asarray(self.cvimage_mat),
                self.params.win_size, self.params.win_multipliers, True)
        if intensity is None:
            return None
        fea_calculated.append(intensity)
        # Record section sizes once, from the first successful extraction.
        if self.sizes is None:
            self.sizes = {}
            if distance_feas is not None:
                self.sizes['distance'] = distance_feas.shape[0]
            self.sizes['intensity'] = intensity.shape[0]
        return fea_calculated

    def extract_features(self):
        """Subsample, sample candidates, and extract feature vectors.
        Returns (feature matrix, 2xm pixels, 3xm base_link points), keeping
        only samples whose features could be computed."""
        self._subsample()
        sampled_pts3d_bl, sampled_pix2d = self._sample_points()
        features_l = []
        pts_with_features = []
        rospy.loginfo('Extracting features')
        for i in range(sampled_pts3d_bl.shape[1]):
            features = self.feature_vec_at(sampled_pts3d_bl[:,i], sampled_pix2d[:,i])
            if features is not None:
                features_l.append(features)
                pts_with_features.append(i)
            if i % 500 == 0:
                rospy.loginfo(i)
        # Regroup per-sample [sections...] into per-section lists, stack each
        # section's columns, then stack sections vertically.
        features_by_type = zip(*features_l)
        xs = np.row_stack([np.column_stack(f) for f in features_by_type])
        rospy.loginfo('Finished feature extraction.')
        return xs, sampled_pix2d[:, pts_with_features], sampled_pts3d_bl[:, pts_with_features]
| [
[
1,
0,
0.0055,
0.0055,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0055,
0.0055,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0109,
0.0055,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import cv",
"import numpy as np",
"import feature_extractor_fpfh.srv as fsrv",
"import hrl_lib.image3d as i3d",
"import hrl_lib.rutils as ru",
"import hrl_lib.prob as pr",
"... |
import csv
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import rospy
import cv
import sys
import hrl_lib.rutils as ru
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import tf
import hrl_camera.ros_camera as cam
from sensor_msgs.msg import CameraInfo
import numpy as np
import hai_sandbox.features as fea
import os.path as pt
import hrl_lib.util as ut
##
# @param bagname
# @param topic
# @return features_list list of tuples [(float time, (list surf_keypoints, list surf_descriptors))...]
def find_image_features(bagname, topic):
    """Run SURF on every image message on `topic` in the bag.
    Returns [(message time, (surf keypoints, surf descriptors)), ...]."""
    features_list = []
    bridge = CvBridge()
    i = 0
    for topic, msg, t in ru.bag_iter(bagname, [topic]):
        # Use the image's own header stamp, not the bag receipt time t.
        t = msg.header.stamp.to_time()
        image = bridge.imgmsg_to_cv(msg, 'bgr8')
        image_gray = fea.grayscale(image)
        surf_keypoints, surf_descriptors = fea.surf(image_gray)
        features_list.append((t, (surf_keypoints, surf_descriptors)))
        rospy.loginfo("%.3f frame %d found %d points" % (t, i, len(surf_keypoints)))
        i = i + 1
    return features_list
def csv_bag_names(fname):
    """Yield one row (list of fields) per line of the CSV file at fname.

    BUG FIX: the original opened the file and only reached the close() after
    the loop, so the handle leaked whenever the generator was not run to
    exhaustion; the with-block closes it deterministically when the generator
    is exhausted or garbage-collected/closed.
    """
    with open(fname) as csv_file:
        for bag_name in csv.reader(csv_file):
            yield bag_name
if __name__ == '__main__':
    # Batch job: for every bag listed in the CSV file argv[1], extract SURF
    # features from the left forearm camera and pickle them beside the bag.
    forearm_cam_l = '/l_forearm_cam/image_rect_color'
    ws_l = '/wide_stereo/left/image_rect_color'
    ws_r = '/wide_stereo/right/image_rect_color'
    #features_list = find_image_features(sys.argv[1], forearm_cam_l)
    #Find all features in all videos that we have
    fname = sys.argv[1]
    for path_complete in csv_bag_names(fname):
        # csv.reader yields a list per row; the bag path is the first field.
        path_complete = path_complete[0]
        rospy.loginfo('processing %s'% path_complete)
        features_list = find_image_features(path_complete, forearm_cam_l)
        path_bag, name_bag = pt.split(path_complete)
        root_name, _ = pt.splitext(name_bag)
        # Output goes next to the bag as <bagname>.surf_pkl
        surf_path_complete = pt.join(path_bag, root_name + '.surf_pkl')
        #pickle features list
        rospy.loginfo('saving feature extraction results to %s' % surf_path_complete)
        ut.save_pickle(features_list, surf_path_complete)
| [
[
1,
0,
0.0133,
0.0133,
0,
0.66,
0,
312,
0,
1,
0,
0,
312,
0,
0
],
[
1,
0,
0.0267,
0.0133,
0,
0.66,
0.0526,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0267,
0.0133,
0,
... | [
"import csv",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"from cv_bridge.cv_bridge import CvBridge, CvBridgeError",
"import rospy",
"import cv",
"import sys",
"import hrl_lib.rutils as ru",
"import hrl_lib.tf_utils as tfu",
"import t... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import sys
import hrl_lib.util as ut
import hrl_lib.rutils as ru
import tf
import hrl_lib.transforms as htf
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import pr2_msgs.msg as pm
import scipy.spatial as sp
from multiprocessing import Process
import time
import os
import numpy as np
import math
import cv
import hai_sandbox.features as fea
import hrl_pr2_lib.pr2_kinematics as pr2k
import pdb
import sensor_msgs.msg as sm
import hrl_pr2_lib.devices as hpr2
#import perception3d.gaussian_curvature as gc
def segment_msgs(time_segments, msgs):
    """Slice a time-ordered message list into sublists, one per segment.

    time_segments -- list of [start, end] pairs; either bound may be the
                     string 'start' / 'end' meaning "unbounded on that side".
    msgs          -- messages carrying .header.stamp, assumed time-sorted.
    Returns one message sublist per segment.

    Note: the start bound selects the index of the last message *before* the
    start time, so each segment keeps one leading message (matches the
    original behavior).
    """
    out = []
    for segment in time_segments:
        start = segment[0]
        endt = segment[1]
        lo = 0
        hi = len(msgs)
        if start != 'start':
            # lo = index of the last message strictly before the start time.
            for idx, message in enumerate(msgs):
                if message.header.stamp.to_time() >= start:
                    break
                lo = idx
        if endt != 'end':
            # hi = index (absolute) of the first message after the end time.
            for idx, message in enumerate(msgs[lo:]):
                if message.header.stamp.to_time() > endt:
                    hi = idx + lo
                    break
        out.append(msgs[lo:hi])
    return out
##
# Find times where contact has been made
#
# @return array of locations where contact has been made, array of times for each location where contact has been made (can be duplicated)
def find_contact_times(left_mat, right_mat, times, thres):
left_mat = left_mat - left_mat[:, 0]
right_mat = right_mat - right_mat[:,0]
#When/where did contact happen?
#TODO: we are assuming just one finger of one arm here!
#pdb.set_trace()
loc_r, time_c = np.where(np.abs(left_mat) > thres)
times_contact = times[time_c.A1]
unique_times = np.array(np.sort(list(set(times_contact.tolist()))))
#return (loc_r, times_contact)
return unique_times
def playback_bag(bag_name):
    # Blocking call: replay the bag with a simulated clock (--clock) so
    # downstream nodes run on bag time.  NOTE(review): bag_name is passed
    # unquoted into a shell command; paths with spaces will break.
    cmd = 'rosbag play %s --clock' % bag_name
    print cmd
    os.system(cmd)
class SimpleJointStateMsg:
    """Lightweight stand-in for a JointState message: keeps just the original
    header and a dict of precomputed fingertip transforms."""
    def __init__(self, header, transforms_dict):
        self.header = header
        self.transforms_dict = transforms_dict
# Find contact points & transform gripper tip to base_frame
class ExtractTFData:
    """Subscribe to left-gripper pressure and joint states; while contact is
    detected, log fingertip locations in the base frame."""
    def __init__(self, listener=None):#, pointcloud_msg):
        rospy.Subscriber('/pressure/l_gripper_motor', pm.PressureState, self.lpress_cb)
        rospy.Subscriber('/joint_states', sm.JointState, self.joint_state_cb)
        # [right tip frame, left tip frame] -- indexed in that order below.
        self.ftip_frames = ['r_gripper_tool_frame',
                            'l_gripper_tool_frame']
        if listener != None:
            self.tflistener = listener
        else:
            self.tflistener = tf.TransformListener()
        self.lmat0 = None           # baseline left/right fingertip pressures (first msg)
        self.rmat0 = None
        self.contact_locs = []      # [t, [r_tip_tf, l_tip_tf]] per pressure msg in contact
        self.base_frame = '/base_footprint'
        self.contact = False        # currently touching something?
        self.contact_stopped = False  # a contact episode has ended
        self.pointcloud_transform = None
        self.jstate_msgs = []       # unused; the append is commented out in joint_state_cb
        # Far-future sentinel meaning "no joint state received yet".
        self.last_jstate_time = time.time() + 999999999.
    def joint_state_cb(self, jmsg):
        # Compute fingertip FK per joint-state message; currently only the
        # receipt time is retained (the message append is commented out).
        tdict = {}
        # good for display purposes (independent of torso link etc)
        tdict['bf_T_rtip'] = tfu.transform('/base_footprint', self.ftip_frames[0], self.tflistener)
        tdict['bf_T_ltip'] = tfu.transform('/base_footprint', self.ftip_frames[1], self.tflistener)
        # want FK from torso
        tdict['torso_T_rtip'] = tfu.transform('/torso_lift_link', self.ftip_frames[0], self.tflistener)
        tdict['torso_T_ltip'] = tfu.transform('/torso_lift_link', self.ftip_frames[1], self.tflistener)
        #self.jstate_msgs.append(SimpleJointStateMsg(jmsg.header, tdict))
        self.last_jstate_time = time.time()
    def lpress_cb(self, pmsg):
        # Zero the pressures against the first message, then threshold to
        # detect contact and record fingertip locations while touching.
        lmat = np.matrix((pmsg.l_finger_tip)).T
        rmat = np.matrix((pmsg.r_finger_tip)).T
        if self.lmat0 == None:
            self.lmat0 = lmat
            self.rmat0 = rmat
            return
        #zero
        lmat = lmat - self.lmat0
        rmat = rmat - self.rmat0
        ##
        # extract data during contact events
        #touch detected
        if np.any(np.abs(lmat) > 250) or np.any(np.abs(rmat) > 250): #TODO: replace this with something more sound
            #Contact has been made!! look up gripper tip location
            def frame_loc(from_frame):
                p_base = tfu.transform(self.base_frame, from_frame, self.tflistener) \
                        * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
                return tfu.matrix_as_tf(p_base)
            tip_locs = [frame_loc(n)[0] for n in self.ftip_frames]
            t = pmsg.header.stamp.to_time()
            rospy.loginfo("contact detected at %.3f" % t)
            self.contact_locs.append([t, tip_locs])
            self.contact = True
        else:
            # Falling edge: contact just ended.
            if self.contact == True:
                rospy.loginfo('contact stopped')
                self.contact_stopped = True
            self.contact = False
            #Only get this transform after we've stopped making contact.
            #self.pointcloud_transform = tfu.transform(self.base_frame, self.pointcloud_msg.header.frame_id, self.tflistener)
class JointMsgConverter:
    """Convert sensor_msgs/JointState messages into per-group dicts of
    pose/velocity/effort column matrices, unwrapping continuous joints
    (forearm/wrist roll) so trajectories have no 2*pi jumps."""
    def __init__(self):
        self.name_dict = None  # joint name -> flat index into msg.position
        # group name -> ordered list of joint names ('rarm', 'larm', ...).
        self.joint_groups = ut.load_pickle('link_names.pkl')

    def msgs_to_dict(self, msgs):
        """Return one dict per message:
        {'poses': {group: col matrix}, 'vels': ..., 'efforts': ..., 'time': float}.
        Assumes every message lists joints in the same order as msgs[0]."""
        converted = []
        for i in range(len(msgs)):
            msg = msgs[i]
            # Lazily build the name->index map and per-group flat-index lists
            # from the first message.
            if self.name_dict is None:
                self.name_dict = {}
                for idx, n in enumerate(msg.name):
                    self.name_dict[n] = idx
                self.joint_idx = {}
                for group in self.joint_groups.keys():
                    self.joint_idx[group] = [self.name_dict[name] for name in self.joint_groups[group]]
            joint_poses = {}
            joint_vel = {}
            joint_eff = {}
            # Extract data for pose, vel, eff: look up each group's values.
            for d, data in zip([joint_poses, joint_vel, joint_eff], [msg.position, msg.velocity, msg.effort]):
                dmat = np.matrix(data).T
                for group in self.joint_groups.keys():
                    d[group] = dmat[self.joint_idx[group], 0]
            converted.append({'poses': joint_poses, 'vels': joint_vel, 'efforts': joint_eff, 'time': msg.header.stamp.to_time()})
        # Take out wrapping of forearm & wrist: remove the 2*pi offset seen
        # between the first two messages from every subsequent pose.
        if len(msgs) < 2:
            return converted
        for group in ['rarm', 'larm']:
            for i in range(len(self.joint_groups[group])):
                joint = self.joint_groups[group][i]
                if 'forearm_roll' in joint or 'wrist_roll' in joint:
                    # BUG FIX: msg.position is indexed by the joint's flat
                    # index (msg.name order), not by its position within the
                    # group; the original used the group-local index i here,
                    # reading the wrong joint's angle.
                    flat_i = self.joint_idx[group][i]
                    delta = msgs[1].position[flat_i] - msgs[0].position[flat_i]
                    realdelta = delta % (2 * math.pi)
                    if realdelta >= math.pi:
                        realdelta -= 2 * math.pi
                    correction = delta - realdelta
                    for j in range(1, len(msgs)):
                        converted[j]['poses'][group][i,0] -= correction
        return converted
##
# @param surf_locs list of ((x,y), lap, size, dir, hess)
# @param point_cloud_3d 3xn matrix
# @param point_cloud_2d 2xn matrix
def assign_3d_to_surf(surf_locs, point_cloud_3d, point_cloud_2d):
    """Attach a 3D location to each SURF keypoint via nearest projected point.

    surf_locs      -- list of ((x, y), laplacian, size, direction, hessian).
    point_cloud_3d -- 3xn matrix of 3D points.
    point_cloud_2d -- 2xn matrix: the same points projected into the image.
    Returns a 3 x len(surf_locs) matrix of matched 3D points.
    """
    # KD-tree over the projected points (one row per point).
    kdtree = sp.KDTree(np.array(point_cloud_2d.T))
    matched_columns = [point_cloud_3d[:, kdtree.query(np.array(keypoint[0]))[1]]
                       for keypoint in surf_locs]
    return np.column_stack(matched_columns)
def find_contacts_and_fk(tflistener, arm):
    # Block until a contact episode ends (or joint states go stale >1s, or
    # ROS shuts down), then return the first recorded fingertip location for
    # `arm` plus the collected joint-state messages (currently always empty,
    # since ExtractTFData's append is commented out).
    find_contact_locs = ExtractTFData(tflistener)
    while not rospy.is_shutdown() \
            and (not find_contact_locs.contact_stopped) \
            and ((time.time() - find_contact_locs.last_jstate_time) < 1.):
        print 'waiting for ExtractTFData to finish.'
        time.sleep(.5)
    print 'got %d joint state messages' % len(find_contact_locs.jstate_msgs)
    contact_locs = find_contact_locs.contact_locs
    # contact_locs entries are [t, [right_tip_tf, left_tip_tf]].
    if arm == 'right':
        contact_tips = [np.matrix(r[1][0]).T for r in contact_locs]
    else:
        contact_tips = [np.matrix(r[1][1]).T for r in contact_locs]
    contact_tips = np.column_stack(contact_tips)
    # Return only the first contact sample's tip position.
    return contact_tips[:,0], find_contact_locs.jstate_msgs
def project_2d_bounded(cam_info, point_cloud_cam):
point_cloud_2d_cam = cam_info.project(point_cloud_cam)
# only count points in image bounds (should be in cam info)
_, in_bounds = np.where(np.invert((point_cloud_2d_cam[0,:] >= (cam_info.w-.6)) + (point_cloud_2d_cam[0,:] < 0) \
+ (point_cloud_2d_cam[1,:] >= (cam_info.h-.6)) + (point_cloud_2d_cam[1,:] < 0)))
point_cloud_2d_cam = point_cloud_2d_cam[:, in_bounds.A1]
point_cloud_reduced_cam = point_cloud_cam[:, in_bounds.A1]
return point_cloud_2d_cam, point_cloud_reduced_cam
def find3d_surf(start_conditions):
    """Detect SURF features on the model image and give each a 3d location.

    start_conditions: dict with 'points' (point cloud in the base frame),
        'pro_T_bf' (base frame -> prosilica frame transform),
        'camera_info' (projection model) and 'model_image' (file path).
    Returns (surf feature locations, surf descriptors,
             3xk feature locations in the prosilica frame,
             2d projections of the in-view cloud).
    """
    cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # Move the cloud from the base frame into the prosilica camera frame.
    cloud_pro = tfu.transform_points(start_conditions['pro_T_bf'], cloud_bf)
    # Keep only the portion of the cloud that projects inside the image.
    cloud_2d_pro, cloud_in_view_pro = project_2d_bounded(
            start_conditions['camera_info'], cloud_pro)
    # SURF features computed on the stored model image.
    surf_locs, surf_descriptors = fea.surf_color(
            cv.LoadImage(start_conditions['model_image']))
    # Nearest-projection match assigns each feature a 3d point.
    surf_loc3d_pro = np.matrix(
            assign_3d_to_surf(surf_locs, cloud_in_view_pro, cloud_2d_pro))
    return surf_locs, surf_descriptors, surf_loc3d_pro, cloud_2d_pro
##########################################################################
# TODO: need some parameters for processing 'model_image', maybe circles
# of different sizes.
def extract_object_localization_features2(start_conditions, tflistener, arm_used, p_base_map):
    """Build an object localization model from a demonstration start state.

    Combines the recorded contact point, SURF features on the model image,
    and an object frame fitted to the features' 3d locations.

    start_conditions: dict with 'points', 'pro_T_bf', 'camera_info',
        'model_image' (see find3d_surf).
    tflistener: tf.TransformListener for contact extraction.
    arm_used: 'left' or 'right', selects which gripper's contact to use.
    p_base_map: unused here (kept for interface compatibility).
    Returns a dict of localization parameters (see keys below).
    """
    # Contact midpoint (base frame) and joint states recorded on approach.
    mid_contact_bf, jstate_msgs = find_contacts_and_fk(tflistener, arm_used)
    model_surf_loc, model_surf_descriptors, surf_loc3d_pro, point_cloud_2d_pro = find3d_surf(start_conditions)
    #Find frame
    # Lift SURF 3d locations back into the base frame (homogeneous coords).
    surf_loc3d_bf = (np.linalg.inv(start_conditions['pro_T_bf']) \
            * np.row_stack((surf_loc3d_pro, 1+np.zeros((1,surf_loc3d_pro.shape[1])))))[0:3,:]
    # Fit a right-handed frame to the feature cluster; p biases the normal
    # to point back toward the robot (-x in the base frame).
    frame_bf = create_frame(surf_loc3d_bf, p=np.matrix([-1, 0, 0.]).T)
    center_bf = np.mean(surf_loc3d_bf, 1)
    #Find out what the SURF features point to in this new frame
    bf_R_pro = (start_conditions['pro_T_bf'][0:3, 0:3]).T
    bf_R_obj = frame_bf
    # Object frame's x axis expressed in the camera frame, then as an
    # in-image angle; feature directions are stored relative to it.
    x_bf = frame_bf[:,0]
    x_pro = bf_R_pro.T * x_bf
    x_ang_pro = math.atan2(x_pro[1,0], x_pro[0,0])
    center_pro = tfu.transform_points(start_conditions['pro_T_bf'], center_bf)
    center2d_pro = start_conditions['camera_info'].project(center_pro)
    surf_directions = []
    surf_dir_center = []
    for loc, lap, size, direction, hess in model_surf_loc:
        # Feature orientation relative to the object frame's x axis.
        surf_directions.append(ut.standard_rad(np.radians(direction) - x_ang_pro))
        # 2d offset from the feature to the object's projected center.
        direction_to_center = center2d_pro - np.matrix(loc).T
        surf_dir_center.append(direction_to_center)
    surf_dir_center = np.column_stack(surf_dir_center)
    return {
            'contact_bf': mid_contact_bf,
            'surf_loc3d_pro': surf_loc3d_pro,
            'surf_loc2d_pro': model_surf_loc,
            'point_cloud_2d_pro': point_cloud_2d_pro,
            'surf_directions': surf_directions, #Orientation
            'surf_pose_dir2d': surf_dir_center, #Position
            'descriptors': model_surf_descriptors,
            'jtransforms': jstate_msgs,
            'frame_bf': frame_bf,
            'center_bf': center_bf
            }
##########################################################################
# TODO: need some parameters for processing 'model_image', maybe circles
# of different sizes.
def extract_object_localization_features(start_conditions, tflistener):
    """Older variant of the localization-feature extractor.

    Waits for contact recording, detects SURF features on the model image,
    assigns each a 3d location, and fits an object frame in the prosilica
    camera frame.  Returns a dict (see keys below).
    """
    ## Find contacts
    find_contact_locs = ExtractTFData(tflistener)
    r = rospy.Rate(10)
    # Block until contact reporting stops (or ROS shuts down).
    while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
        r.sleep()
    contact_locs = find_contact_locs.contact_locs
    ## Detect features, get 3d location for each feature
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(cv.LoadImage(model_file_name))
    ## Assign 3d location to surf features
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # Homogeneous transform into the prosilica frame, then project to 2d.
    point_cloud_pro = start_conditions['pro_T_bf'] * np.row_stack((point_cloud_bf, 1+np.zeros((1, point_cloud_bf.shape[1]))))
    point_cloud_2d_pro = start_conditions['camera_info'].project(point_cloud_pro[0:3,:])
    # NOTE(review): features are matched to the *base-frame* cloud via
    # their 2d projections; the 2d/3d columns are assumed aligned.
    surf_loc3d_arr_bf = np.array(assign_3d_to_surf(model_surf_loc, point_cloud_bf, point_cloud_2d_pro))
    surf_loc_tree_bf = sp.KDTree(surf_loc3d_arr_bf.T)
    #################################################
    # not needed right now but can be useful later..
    #################################################
    # Get SURF features closest to contact locs
    # r[1][2]/r[1][3] are assumed to be the left/right tip positions in the
    # base frame -- confirm against ExtractTFData.
    left_contact, right_contact = zip(*[(np.matrix(r[1][2]).T, np.matrix(r[1][3]).T) for r in contact_locs])
    left_contact = np.column_stack(left_contact)
    right_contact = np.column_stack(right_contact)
    # Midpoint between the two tips at the first contact.
    mid_contact_bf = (left_contact[:,0] + right_contact[:,0]) / 2.
    #data_dict['pro_T_bf'] * np.row_stack((mid_contact_bf, np
    surf_closest_idx = surf_loc_tree_bf.query(np.array(mid_contact_bf.T))[1] #Get surf feature at mid point
    surf_closest3d = surf_loc3d_arr_bf[:, surf_closest_idx]
    surf_closest_fea = model_surf_loc[surf_closest_idx]
    #Create a frame for this group of features
    surf_loc_3d_pro = (start_conditions['pro_T_bf'] * np.row_stack([surf_loc3d_arr_bf, 1 + np.zeros((1, surf_loc3d_arr_bf.shape[1]))]))[0:3,:]
    object_frame_pro = create_frame(np.matrix(surf_loc_3d_pro))
    #Find out what the SURF features point to in this new frame
    surf_directions = []
    for loc, lap, size, direction, hess in model_surf_loc:
        drad = np.radians(direction)
        #project direction into the canonical object frame
        surf_dir_obj = object_frame_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T
        #measure angle between SURF feature and x axis of object frame, store this as delta theta
        delta_theta = math.atan2(surf_dir_obj[1,0], surf_dir_obj[0,0])
        surf_directions.append(delta_theta)
    return {
            'descriptors': model_surf_descriptors,
            'directions': surf_directions,
            'contact_bf': mid_contact_bf,
            'closest_feature': surf_closest_fea[0],
            #'object_frame_bf': [np.mean(np.matrix(surf_loc3d_arr_bf), 1), create_frame(surf_loc3d_arr_bf)],
            'object_frame_pro': [np.mean(np.matrix(surf_loc_3d_pro), 1), object_frame_pro, surf_loc_3d_pro]
            }
#surf_dir_obj => obj_dir
#project all points ontocz
#draw this surf feature in image
#proc_surfed = fea.draw_surf(proc_img, model_surf_loc, (200, 0, 0))
#proc_surfed = fea.draw_surf(proc_surfed, [surf_closest_fea], (0,0,255))
#########################################################
# Pose estimation
# # for each SURF direction, subtract delta_theta from it to get a vote for the direction of the object frame's
# x axis
# # average all the predictions to get object's x direction
def create_frame(points3d, p=np.matrix([-1,0,0.]).T):
    """Fit a right-handed coordinate frame to a 3d point cluster.

    The normal (third column) is the least-significant principal direction
    of the points' covariance, sign-flipped to agree with p.  The x axis is
    the signed principal direction best aligned with +z or +x, and y
    completes the right-handed frame.

    points3d: 3xn matrix of points
    p: preferred normal direction (column vector)
    Returns a 3x3 matrix whose columns are [x, y, normal].
    """
    basis, _, _ = np.linalg.svd(np.cov(points3d))
    basis = np.matrix(basis)
    # Orient the weakest principal direction to agree with p.
    normal = basis[:, 2]
    if (normal.T * p)[0, 0] < 0:
        normal = -normal
    # Among +/- the two strongest principal directions, pick the one whose
    # dot product with +z or +x is largest (first maximum wins).
    z_axis = np.matrix([0, 0, 1.0]).T
    x_axis = np.matrix([1, 0, 0.0]).T
    candidates = [basis[:, 0], basis[:, 1], -basis[:, 0], -basis[:, 1]]
    references = [z_axis, x_axis]
    scores = [(cand.T * ref)[0, 0] for cand in candidates for ref in references]
    paired = [cand for cand in candidates for ref in references]
    x_dir = paired[np.argmax(scores)]
    # y completes the right-handed frame: y = normal x x.
    y_dir = np.cross(normal.T, x_dir.T).T
    return np.matrix(np.column_stack([x_dir, y_dir, normal]))
def process_bag(full_bag_name, prosilica_image_file, model_image_file, experiment_start_condition_pkl, arm_used='left'):
    """Replay a demonstration bag and distill it into a processed pickle.

    Plays the bag in a child process while listening to tf, extracts
    object localization features from the start conditions, segments the
    recorded messages by gripper contact events, runs FK on every joint
    state, and saves everything to '<bag name>_processed.pkl'.

    full_bag_name: path to the rosbag to process.
    prosilica_image_file: high-res image taken at the start.
    model_image_file: cropped model image of the object.
    experiment_start_condition_pkl: pickle with the start-condition dict.
    arm_used: 'left' or 'right'; selects which gripper's pressure stream
        defines the contact segmentation.
    """
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)
    ###############################################################################
    # Playback the bag in a separate process so tf/topics are live below.
    bag_playback = Process(target=playback_bag, args=(full_bag_name,))
    bag_playback.start()
    ###############################################################################
    ## Listen for transforms using rosbag
    # NOTE(review): node name 'bag_proceessor' is misspelled but is a
    # runtime identifier; left unchanged.
    rospy.init_node('bag_proceessor')
    tl = tf.TransformListener()
    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(), rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'
    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions['pose_parameters'] = extract_object_localization_features2(start_conditions, tl, arm_used, p_base)
    # Best-effort shutdown: terminate is issued twice with pauses because
    # the playback child does not always die on the first signal.
    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' % str(not bag_playback.is_alive()))
    ###############################################################################
    #Read bag using programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, ['/joint_states', '/l_cart/command_pose',
                                             '/r_cart/command_pose', '/torso_controller/state',
                                             '/pressure/l_gripper_motor', '/pressure/r_gripper_motor'])
    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')
    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])
    ## create segments based on contacts
    # TODO: make this accept more contact stages
    # 250 is the fingertip pressure threshold used to declare contact.
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    # Three segments (approach / in-contact / retreat) when contact was
    # detected, otherwise one segment spanning the whole bag.
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]], [contact_times[0], contact_times[-1]], [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]
    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])
    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments, topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments, topics_dict['/r_cart/command_pose']['msg'])
    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)
    j_segs = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict = jseg_dicts[0][0]
    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    # Object frame in the base frame, built from the localization features.
    bf_T_obj = htf.composeHomogeneousTransform(start_conditions['pose_parameters']['frame_bf'],
                                               start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    # Annotate every joint-state dict with both grippers' tip poses, in
    # the base frame and in the object frame.
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                    'r_wrist_roll_link', 'r_gripper_tool_frame',
                    d['poses']['rarm'].A1.tolist())
            ltip_bf = pr2_kinematics.left.fk('base_footprint',
                    'l_wrist_roll_link', 'l_gripper_tool_frame',
                    d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf
            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)
            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)
    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        # A state starts at the earliest message of any stream in it.
        start_times = [lcart_seg[i][0].header.stamp.to_time(),
                       rcart_seg[i][0].header.stamp.to_time(),
                       jseg_dicts[i][0]['time'],
                       pressure_lseg[i][0].header.stamp.to_time(),
                       pressure_rseg[i][0].header.stamp.to_time()]
        sdict = {'name': name,
                 'start_time': np.min(start_times),
                 'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]],
                               [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
                 'joint_states': jseg_dicts[i]
                 #'pressure': [pressure_lseg[i], pressure_rseg[i]]
                 }
        movement_states.append(sdict)
    # store in a dict
    data = {'start_conditions': start_conditions, # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points' (in base_frame),
                                                  # 'highdef_image', 'model_image',
                                                  ## 'pose_parameters'
                                                  ## 'descriptors'
                                                  ## 'directions' (wrt to canonical orientation)
                                                  ## 'closest_feature'
            'base_pose': pose_base,
            'robot_pose': j0_dict,
            'arm': arm_used,
            'movement_states': movement_states}
    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    bag_playback.join()
    rospy.loginfo('finished!')
#python bag_processor.py 08_06/light/off/_2010-08-06-13-35-04.bag 08_06/light/off/off_start.png light_switch_model.png 08_06/light/off/off_start.pkl
if __name__ == '__main__':
    # Usage: bag_processor.py <bag> <prosilica image> <model image> <start-condition pkl>
    arm_used = 'left'
    full_bag_name = sys.argv[1]
    prosilica_image_file = sys.argv[2]
    model_image_file = sys.argv[3]
    experiment_start_condition_pkl = sys.argv[4]
    # Fix: arm_used was assigned but never passed, so changing it here had
    # no effect; pass it explicitly (default in process_bag is also 'left',
    # so behavior for the current value is unchanged).
    process_bag(full_bag_name, prosilica_image_file, model_image_file,
                experiment_start_condition_pkl, arm_used=arm_used)
#def create_frame2(contact_point, points3d, p=np.matrix([1,0,0.]).T):
### contact point is the center, local plane from 3D points close to contact
# u, s, vh = np.linalg.svd(points3d)
# u = np.matrix(u)
# #pdb.set_trace()
#
# # Pick normal
# if (u[:,2].T * p)[0,0] < 0:
# normal = -u[:,2]
# else:
# normal = u[:,2]
#
# # pick the next direction as the one closest to to +z or +x
# z_plus = np.matrix([0,0,1.0]).T
# x_plus = np.matrix([1,0,0.0]).T
#
# u0 = u[:,0]
# u1 = u[:,1]
#
# mags = []
# pos_dirs = []
# for udir in [u0, u1, -u0, -u1]:
# for can_dir in [z_plus, x_plus]:
# mags.append((udir.T * can_dir)[0,0])
# pos_dirs.append(udir)
# x_dir = pos_dirs[np.argmax(mags)]
#
# # Cross product for the final (is this the same as the final vector?)
# y_dir = np.cross(normal.T, x_dir.T).T
# return np.column_stack([x_dir, y_dir, normal])
#for loc, lap, size, direction, hess in model_surf_loc:
# drad = np.radians(direction)
# #project direction into the cannonical object frame
# #surf_dir_obj = object_frame_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T
# #obj_R_bf = frame_bf.T
# bf_R_pro = (start_conditions['pro_T_bf'][0:3,0:3]).T
# surf_dir_obj = frame_bf.T * bf_R_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T
# #measure angle between SURF feature and x axis of object frame, store this as delta theta
# delta_theta = math.atan2(surf_dir_obj[1,0], surf_dir_obj[0,0])
# surf_directions.append(delta_theta)
##print 'frame_bf.T', frame_bf.T
##print 'bf_R_pro', (start_conditions['pro_T_bf'][0:3,0:3]).T
#r = rospy.Rate(10)
#while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
# r.sleep()
#contact_locs = find_contact_locs.contact_locs
#et = ExtractTFData()
# ['camera_info', 'map_T_bf', 'pro_T_bf', 'points']
#print 'got loc %.2f %.2f %.2f'% (t[0], t[1], t[2])
#loc_fname = '%s_loc.pkl' % os.path.join(bag_path, filename)
#print 'saving to %s' % loc_fname
#ut.save_pickle((t,r), loc_fname)
#print contact_times - contact_times[0]
#print contact_times[1:] - contact_times[:-1]
#pb.plot(contact_times-contact_times[0], 'g.')
#pb.show()
# pose_base = [t, r], t is len3, r is len4
# j0_dict {'poses': joint_poses, 'vels': joint_vel, 'efforts': joint_eff, 'time': msg.header.stamp.to_time()}
# with each entry having keys: ['rarm'] ['larm'] ['head_traj'] ['torso']
# arm is a string {'left', 'right'}
# movement_states
# 'name'
# 'start_time'
# 'cartesian'
# 'joint_states'
# 'pressure'
#'movement_states': [{'name': #,
# 'cartesian':#,
# 'joint_states': # j_dicts, j_times
# 'pressure': #,
# }]
# #}
##ut.save_pickle(data, extracted_name)
#if len(joint_states) <= 1:
# raise RuntimeError('Not enough joint state messages. Got %d messages.' % len(joint_states))
#joint_states)
| [
[
1,
0,
0.0013,
0.0013,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0013,
0.0013,
0,
0.66,
0.027,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0025,
0.0013,
0,
0.6... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import sys",
"import hrl_lib.util as ut",
"import hrl_lib.rutils as ru",
"import tf",
"import hrl_lib.transforms as htf",
"import hrl_lib.tf_utils as tfu",
"import tf.transf... |
import roslib; roslib.load_manifest('hai_sandbox')
import rospy
import hai_sandbox.recognize_3d as r3d
import hrl_lib.util as ut
import cv
import sys

# Visualize a saved learner run: the detected center, the points that were
# explored, the classifier predictions, and the point finally chosen --
# all drawn over the source image in an OpenCV window.
result = ut.load_pickle(sys.argv[1])
img = cv.LoadImageM(result['image'])

# Mark the detected center.
r3d.draw_points(img, result['center'], [255, 0, 0], 6, 2)

if 'pos' in result:
    # Points the learner actually tried, colored by outcome.
    r3d.draw_points(img, result['pos'], [50, 255, 0], 9, 1)
    r3d.draw_points(img, result['neg'], [50, 0, 255], 9, 1)

if 'pos_pred' in result:
    # Classifier predictions over the image.
    r3d.draw_points(img, result['pos_pred'], [255, 204, 51], 3, -1)
    r3d.draw_points(img, result['neg_pred'], [51, 204, 255], 3, -1)

# Highlight the point being selected: green if labeled positive, else red.
chosen_point, chosen_label = result['tried']
marker_color = [0, 255, 0] if chosen_label == r3d.POSITIVE else [0, 0, 255]
r3d.draw_points(img, chosen_point, marker_color, 8, -1)

cv.NamedWindow('task relevant learner display', cv.CV_WINDOW_AUTOSIZE)
cv.ShowImage('task relevant learner display', img)
while True:
    cv.WaitKey(33)
| [
[
1,
0,
0.0233,
0.0233,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0233,
0.0233,
0,
0.66,
0.0526,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0465,
0.0233,
0,
0.... | [
"import roslib; roslib.load_manifest('hai_sandbox')",
"import roslib; roslib.load_manifest('hai_sandbox')",
"import rospy",
"import hai_sandbox.recognize_3d as r3d",
"import hrl_lib.util as ut",
"import cv",
"import sys",
"fname = sys.argv[1]",
"pkl = ut.load_pickle(fname)",
"image_name = pkl['ima... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import trigger_msgs.msg
rospy.init_node("trigger", anonymous=True)
head_up = rospy.Publisher("head_up", trigger_msgs.msg.Trigger, latch = True)
head_down = rospy.Publisher("head_down", trigger_msgs.msg.Trigger, latch = True)
arm_on = rospy.Publisher("arm_on", trigger_msgs.msg.Trigger, latch = True)
arm_off = rospy.Publisher("arm_off", trigger_msgs.msg.Trigger, latch = True)
r = rospy.Rate(60/60.)
i = 1
while not rospy.is_shutdown():
print '------------', i, '-------------'
if i > 4:
if i % 2 == 1:
#Down
print 'down'
head_down.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
if i % 2 == 0:
#Up
print 'up'
head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
if i % 4 == 1:
arm_on.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
if i % 4 == 3:
arm_off.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
i = i+1
r.sleep()
| [
[
1,
0,
0.0263,
0.0263,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0526,
0.0263,
0,
0.66,
0.0833,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0789,
0.0263,
0,
0.... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import trigger_msgs.msg",
"rospy.init_node(\"trigger\", anonymous=True)",
"head_up = rospy.Publisher(\"head_up\", trigger_msgs.msg.Trigger, latch = True)",
"head_down = rospy.Publisher(\"head_down\", trigger_msgs.m... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import cv
import trigger_msgs.msg
rospy.init_node("trigger", anonymous=True)
arm_trigger = rospy.Publisher("arm_trigger", trigger_msgs.msg.Trigger, latch = True)
head_trigger = rospy.Publisher("head_trigger", trigger_msgs.msg.Trigger, latch = True)
cv.NamedWindow('keyboard', 1)
img = cv.CreateImage((30, 30), cv.IPL_DEPTH_8U, 1)
#r = rospy.Rate(132/60.)
r = rospy.Rate(10.)
i = 0
while not rospy.is_shutdown():
cv.ShowImage('keyboard', img)
k = cv.WaitKey(10)
#print (k & 0xff), k
if chr(k & 0xff) == 'h':
print 'head!'
head_trigger.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
if chr(k & 0xff) == 'a':
print 'arm!'
arm_trigger.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
#if i % 4 == 0:
#if i % 8 == 0:
#i = i+1
#r.sleep()
| [
[
1,
0,
0.0286,
0.0286,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0571,
0.0286,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0857,
0.0286,
0,
0.... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import cv",
"import trigger_msgs.msg",
"rospy.init_node(\"trigger\", anonymous=True)",
"arm_trigger = rospy.Publisher(\"arm_trigger\", trigger_msgs.msg.Trigger, latch = True)",
"head_trigger = rospy.Publisher(\"h... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import trigger_msgs.msg

# Send the robot to its initial demo pose: head after 1 second, then both
# arms after 10 seconds, via latched trigger messages.
rospy.init_node("trigger", anonymous=True)

left_initial_pose00 = rospy.Publisher("left_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
right_initial_pose00 = rospy.Publisher("right_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
head_initial_pose00 = rospy.Publisher("head_initial_pose00", trigger_msgs.msg.Trigger, latch = True)

rate = rospy.Rate(60/60.)

head_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 1)), rospy.get_param("~event", ""))
left_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 10)), rospy.get_param("~event", ""))
right_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 10)), rospy.get_param("~event", ""))

# Linger so the latched messages are delivered before the node exits.
for _ in range(3):
    rate.sleep()
| [
[
1,
0,
0.0455,
0.0455,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0909,
0.0455,
0,
0.66,
0.0667,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1364,
0.0455,
0,
0.... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import trigger_msgs.msg",
"rospy.init_node(\"trigger\", anonymous=True)",
"left_initial_pose00 = rospy.Publisher(\"left_initial_pose00\", trigger_msgs.msg.Trigger, latch = True)",
"right_initial_pose00 = rospy.Publ... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import trigger_msgs.msg
import dynamic_reconfigure.client
import threading
import time
import geometry_msgs.msg as gm
rospy.init_node("wake_up", anonymous=True)
projector_on = {'camera_reset': False,
'forearm_l_rate': 30.0,
'forearm_l_trig_mode': 1,
'forearm_r_rate': 30.0,
'forearm_r_trig_mode': 1,
'narrow_stereo_trig_mode': 2,
'projector_mode': 3,
'projector_pulse_length': 0.002,
'projector_pulse_shift': 0.0,
'projector_rate': 58.823529411764703,
'projector_tweak': 0.0,
'prosilica_projector_inhibit': False,
'stereo_rate': 29.411764705882351,
'wide_stereo_trig_mode': 2}
projector_off = {'camera_reset': False,
'forearm_l_rate': 30.0,
'forearm_l_trig_mode': 1,
'forearm_r_rate': 30.0,
'forearm_r_trig_mode': 1,
'narrow_stereo_trig_mode': 2,
'projector_mode': 1,
'projector_pulse_length': 0.002,
'projector_pulse_shift': 0.0,
'projector_rate': 58.823529411764703,
'projector_tweak': 0.0,
'prosilica_projector_inhibit': False,
'stereo_rate': 29.411764705882351,
'wide_stereo_trig_mode': 2}
projector = dynamic_reconfigure.client.Client('camera_synchronizer_node')
#move_base = rospy.Publisher('simple_move_base', gm.Pose2D)
blink_time = .2
print 'on'
projector.update_configuration(projector_on)
time.sleep(1)
print 'off'
projector.update_configuration(projector_off)
time.sleep(blink_time)
print 'on'
projector.update_configuration(projector_on)
time.sleep(blink_time)
print 'off'
projector.update_configuration(projector_off)
time.sleep(blink_time)
print 'on'
projector.update_configuration(projector_on)
time.sleep(blink_time)
print 'off'
projector.update_configuration(projector_off)
time.sleep(1)
print 'on'
projector.update_configuration(projector_on)
time.sleep(10)
#p2d = gm.Pose2D()
#p2d.x = .6
#p2d.y = .15
# move_base.publish(p2d)
#r = rospy.Rate(60/60.)
#projector_animation = ProjectorWakeUp()
#projector_animation.start()
#time.sleep(3)
#time.sleep(blink_time)
#self.projector.update_configuration(self.projector_off)
#self.projector.update_configuration(self.projector_on)
#time.sleep(.2)
#self.projector.update_configuration(self.projector_off)
#time.sleep(.05)
#self.projector.update_configuration(self.projector_on)
#time.sleep(.05)
#self.projector.update_configuration(self.projector_off)
#time.sleep(.05)
#self.projector.update_configuration(self.projector_on)
#time.sleep(.05)
#self.projector.update_configuration(self.projector_off)
#time.sleep(2)
#self.projector.update_configuration(self.projector_on)
#time.sleep(3)
| [
[
1,
0,
0.0074,
0.0074,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0147,
0.0074,
0,
0.66,
0.0294,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0221,
0.0074,
0,
0.... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import trigger_msgs.msg",
"import dynamic_reconfigure.client",
"import threading",
"import time",
"import geometry_msgs.msg as gm",
"rospy.init_node(\"wake_up\", anonymous=True)",
"projector_on = {'camera_res... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import dynamic_reconfigure.client
import time
import trigger_msgs.msg
import geometry_msgs.msg as gm
import time
import sys
dist = float(sys.argv[1])
print dist
rospy.init_node("move_back", anonymous=True)
move_base = rospy.Publisher('simple_move_base', gm.Pose2D)
r = rospy.Rate(10)
p2d = gm.Pose2D()
p2d.x = dist
p2d.y = .15
print 'move_back -.6'
r.sleep()
move_base.publish(p2d)
r.sleep()
r.sleep()
r.sleep()
r.sleep()
r.sleep()
#import threading as tr
#class ProjectorWakeUp(threading.Thread):
#
# def __init__(self):
# threading.Thread.__init__(self)
# self.projector = dynamic_reconfigure.client.Client('camera_synchronizer_node')
#
# def run(self):
# self.projector.update_configuration(projector_on)
# time.sleep(.2)
# self.projector.update_configuration(projector_off)
# time.sleep(.05)
# self.projector.update_configuration(projector_on)
# time.sleep(.05)
# self.projector.update_configuration(projector_off)
# time.sleep(2)
# self.projector.update_configuration(projector_on)
#head_up = rospy.Publisher("head_up", trigger_msgs.msg.Trigger, latch = True)
#head_down = rospy.Publisher("head_down", trigger_msgs.msg.Trigger, latch = True)
#head_down_up = rospy.Publisher("head_down_up", trigger_msgs.msg.Trigger, latch = True)
#arm_on = rospy.Publisher("light_on", trigger_msgs.msg.Trigger, latch = True)
#arm_off = rospy.Publisher("light_off", trigger_msgs.msg.Trigger, latch = True)
#
#left_initial_pose = rospy.Publisher("left_start", trigger_msgs.msg.Trigger, latch = True)
#left_initial_pose0 = rospy.Publisher("left_start2", trigger_msgs.msg.Trigger, latch = True)
#right_initial_pose0 = rospy.Publisher("right_initial_pose0", trigger_msgs.msg.Trigger, latch = True)
##right_initial_pose00 = rospy.Publisher("right_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
#froo_froo = rospy.Publisher("froo_froo", trigger_msgs.msg.Trigger, latch = True)
#head_look_around = rospy.Publisher("head_look_around2", trigger_msgs.msg.Trigger, latch = True)
#
#both_arms_forward2 = rospy.Publisher("both_arms_forward2", trigger_msgs.msg.Trigger, latch = True)
#both_arms_fold2 = rospy.Publisher("both_arms_fold2", trigger_msgs.msg.Trigger, latch = True)
#both_arms_fold_end_pose = rospy.Publisher("both_arms_fold_end_pose", trigger_msgs.msg.Trigger, latch = True)
#head_turn = rospy.Publisher("head_turn", trigger_msgs.msg.Trigger, latch = True)
#
#arm_spin = rospy.Publisher("arm_spin", trigger_msgs.msg.Trigger, latch = True)
#raise_the_roof = rospy.Publisher("raise_the_roof", trigger_msgs.msg.Trigger, latch = True)
#head_up_full = rospy.Publisher("head_up_full", trigger_msgs.msg.Trigger, latch = True)
#head_down_full = rospy.Publisher("head_down_full", trigger_msgs.msg.Trigger, latch = True)
#hand_up = rospy.Publisher("hand_up", trigger_msgs.msg.Trigger, latch = True)
#hand_down = rospy.Publisher("hand_down", trigger_msgs.msg.Trigger, latch = True)
#left_initial_pose00 = rospy.Publisher("left_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
# right_initial_pose00 = rospy.Publisher("right_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
#right_initial_pose = rospy.Publisher("right_initial_pose", trigger_msgs.msg.Trigger, latch = True)
#projector_on = {'camera_reset': False,
# 'forearm_l_rate': 30.0,
# 'forearm_l_trig_mode': 1,
# 'forearm_r_rate': 30.0,
# 'forearm_r_trig_mode': 1,
# 'narrow_stereo_trig_mode': 2,
# 'projector_mode': 3,
# 'projector_pulse_length': 0.002,
# 'projector_pulse_shift': 0.0,
# 'projector_rate': 58.823529411764703,
# 'projector_tweak': 0.0,
# 'prosilica_projector_inhibit': False,
# 'stereo_rate': 29.411764705882351,
# 'wide_stereo_trig_mode': 2}
#
#
#projector_off = {'camera_reset': False,
# 'forearm_l_rate': 30.0,
# 'forearm_l_trig_mode': 1,
# 'forearm_r_rate': 30.0,
# 'forearm_r_trig_mode': 1,
# 'narrow_stereo_trig_mode': 2,
# 'projector_mode': 1,
# 'projector_pulse_length': 0.002,
# 'projector_pulse_shift': 0.0,
# 'projector_rate': 58.823529411764703,
# 'projector_tweak': 0.0,
# 'prosilica_projector_inhibit': False,
# 'stereo_rate': 29.411764705882351,
# 'wide_stereo_trig_mode': 2}
#projector = dynamic_reconfigure.client.Client('camera_synchronizer_node')
#r = rospy.Rate(120/60.)
#time.sleep(.2)
#r.sleep()
#projector_animation = ProjectorWakeUp()
#i = -23
#while not rospy.is_shutdown():
#
# print '------------', i, '-------------'
# if i == -23:
# print 'left_initial_pose0'
# left_initial_pose0.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 1.5)), rospy.get_param("~event", ""))
#
# if i == -12:
# print 'left_initial_pose'
# left_initial_pose.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 2.0)), rospy.get_param("~event", ""))
#
# if i == -6:
# print 'arm_on'
# arm_on.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.3)), rospy.get_param("~event", ""))
#
# if i == -5:
# print 'head_look_around'
# head_look_around.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.2)), rospy.get_param("~event", ""))
#
# if i >= 9 and i <= 49:
# arm_i = (i - 9)
# if arm_i % 8 == 0:
# print 'lights off'
# arm_off.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", .8)), rospy.get_param("~event", ""))
#
# if arm_i % 8 == 4:
# print 'lights on'
# arm_on.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", .8)), rospy.get_param("~event", ""))
#
# if i >= 15 and i <= 34:
# head_i = i - 15
# if head_i % 4 == 0:
# #Down
# print 'down'
# head_down_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.1)), rospy.get_param("~event", ""))
# #head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
#
# if head_i % 4 == 2:
# #Up
# print 'up'
# #head_down.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
# #head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
# #hand_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
#
# if i >= 40 and i <= 43:
# head_i = i - 41
# if head_i % 4 == 0:
# head_down_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.1)), rospy.get_param("~event", ""))
# print 'down'
#
# if head_i % 4 == 2:
# print 'up'
# #head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
#
# ################################################################################
# ### FREESTYLE
# ################################################################################
# if i == 23:
# print 'right_initial_pose0'
# right_initial_pose0.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.3)), rospy.get_param("~event", ""))
#
# #if i == 24:
# # print 'right_initial_pose0'
# # right_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.5)), rospy.get_param("~event", ""))
#
# if i == 26:
# #if i == 29:
# print 'arm_spin'
# arm_spin.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.0)), rospy.get_param("~event", ""))
#
# #if i >= 29 and i < 37:
# # if ((i-29) % 9) == 0:
# # print 'Free style!'
# # froo_froo.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 2.0)), rospy.get_param("~event", ""))
#
# if i == 42:
# #if i == 45:
# #ANOTHER FREESTYLE
# print 'raise_the_roof'
# raise_the_roof.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.0)), rospy.get_param("~event", ""))
#
# ###############################################################################
# ## Dancy
# ###############################################################################
# if i == 53:
# print 'head_down'
# print 'both_arms_forward2'
# both_arms_forward2.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 2)), rospy.get_param("~event", ""))
#
# if i == 56:
# p2d = gm.Pose2D()
# p2d.x = -.4
# p2d.y = .15
# move_base.publish(p2d)
# head_down_full.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.5)), rospy.get_param("~event", ""))
#
# if i == 61:
# print 'head_up'
# print 'both_arms_fold2'
# head_up_full.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.4)), rospy.get_param("~event", ""))
# both_arms_fold2.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
#
# if i == 65:
# p2d = gm.Pose2D()
# p2d.y = 100.
# p2d.theta = -390
# move_base.publish(p2d)
#
# if i == 77:
# print 'both_arms_fold_end_pose'
# print 'head_turn'
# head_turn.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 7.0)), rospy.get_param("~event", ""))
# both_arms_fold_end_pose.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
#
# i = i+1
# r.sleep()
# if i == 80:
# break
#projector.update_configuration(projector_off)
#if i == -12:
# left_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 5)), rospy.get_param("~event", ""))
# right_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 5)), rospy.get_param("~event", ""))
#if i == 43:
# right_initial_pose.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.)), rospy.get_param("~event", ""))
| [
[
1,
0,
0.0039,
0.0039,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0078,
0.0039,
0,
0.66,
0.04,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0116,
0.0039,
0,
0.66... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import dynamic_reconfigure.client",
"import time",
"import trigger_msgs.msg",
"import geometry_msgs.msg as gm",
"import time",
"import sys",
"dist = float(sys.argv[1])",
"print(dist)",
"rospy.init_node(\"... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import dynamic_reconfigure.client
import time
import trigger_msgs.msg
import geometry_msgs.msg as gm
#import threading as tr
#class ProjectorWakeUp(threading.Thread):
#
# def __init__(self):
# threading.Thread.__init__(self)
# self.projector = dynamic_reconfigure.client.Client('camera_synchronizer_node')
#
# def run(self):
# self.projector.update_configuration(projector_on)
# time.sleep(.2)
# self.projector.update_configuration(projector_off)
# time.sleep(.05)
# self.projector.update_configuration(projector_on)
# time.sleep(.05)
# self.projector.update_configuration(projector_off)
# time.sleep(2)
# self.projector.update_configuration(projector_on)
rospy.init_node("trigger", anonymous=True)
# Latched trigger publishers, one per choreography move; latch=True so a
# subscriber that connects late still receives the most recent command.
head_up = rospy.Publisher("head_up", trigger_msgs.msg.Trigger, latch = True)
head_down = rospy.Publisher("head_down", trigger_msgs.msg.Trigger, latch = True)
head_down_up = rospy.Publisher("head_down_up", trigger_msgs.msg.Trigger, latch = True)
# "arm" lights: these publish on the light_on/light_off topics.
arm_on = rospy.Publisher("light_on", trigger_msgs.msg.Trigger, latch = True)
arm_off = rospy.Publisher("light_off", trigger_msgs.msg.Trigger, latch = True)
left_initial_pose = rospy.Publisher("left_start", trigger_msgs.msg.Trigger, latch = True)
left_initial_pose0 = rospy.Publisher("left_start2", trigger_msgs.msg.Trigger, latch = True)
right_initial_pose0 = rospy.Publisher("right_initial_pose0", trigger_msgs.msg.Trigger, latch = True)
#right_initial_pose00 = rospy.Publisher("right_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
froo_froo = rospy.Publisher("froo_froo", trigger_msgs.msg.Trigger, latch = True)
head_look_around = rospy.Publisher("head_look_around2", trigger_msgs.msg.Trigger, latch = True)
both_arms_forward2 = rospy.Publisher("both_arms_forward2", trigger_msgs.msg.Trigger, latch = True)
both_arms_fold2 = rospy.Publisher("both_arms_fold2", trigger_msgs.msg.Trigger, latch = True)
both_arms_fold_end_pose = rospy.Publisher("both_arms_fold_end_pose", trigger_msgs.msg.Trigger, latch = True)
head_turn = rospy.Publisher("head_turn", trigger_msgs.msg.Trigger, latch = True)
arm_spin = rospy.Publisher("arm_spin", trigger_msgs.msg.Trigger, latch = True)
raise_the_roof = rospy.Publisher("raise_the_roof", trigger_msgs.msg.Trigger, latch = True)
head_up_full = rospy.Publisher("head_up_full", trigger_msgs.msg.Trigger, latch = True)
head_down_full = rospy.Publisher("head_down_full", trigger_msgs.msg.Trigger, latch = True)
# Base motion commands (Pose2D displacement requests; not latched).
move_base = rospy.Publisher('simple_move_base', gm.Pose2D)
#hand_up = rospy.Publisher("hand_up", trigger_msgs.msg.Trigger, latch = True)
#hand_down = rospy.Publisher("hand_down", trigger_msgs.msg.Trigger, latch = True)
#left_initial_pose00 = rospy.Publisher("left_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
# right_initial_pose00 = rospy.Publisher("right_initial_pose00", trigger_msgs.msg.Trigger, latch = True)
#right_initial_pose = rospy.Publisher("right_initial_pose", trigger_msgs.msg.Trigger, latch = True)
# camera_synchronizer_node configuration with the texture projector ON
# (projector_mode 3; the remaining keys keep the camera sync unchanged).
projector_on = {'camera_reset': False,
                'forearm_l_rate': 30.0,
                'forearm_l_trig_mode': 1,
                'forearm_r_rate': 30.0,
                'forearm_r_trig_mode': 1,
                'narrow_stereo_trig_mode': 2,
                'projector_mode': 3,
                'projector_pulse_length': 0.002,
                'projector_pulse_shift': 0.0,
                'projector_rate': 58.823529411764703,
                'projector_tweak': 0.0,
                'prosilica_projector_inhibit': False,
                'stereo_rate': 29.411764705882351,
                'wide_stereo_trig_mode': 2}

# Identical configuration with the projector OFF (projector_mode 1).
projector_off = {'camera_reset': False,
                 'forearm_l_rate': 30.0,
                 'forearm_l_trig_mode': 1,
                 'forearm_r_rate': 30.0,
                 'forearm_r_trig_mode': 1,
                 'narrow_stereo_trig_mode': 2,
                 'projector_mode': 1,
                 'projector_pulse_length': 0.002,
                 'projector_pulse_shift': 0.0,
                 'projector_rate': 58.823529411764703,
                 'projector_tweak': 0.0,
                 'prosilica_projector_inhibit': False,
                 'stereo_rate': 29.411764705882351,
                 'wide_stereo_trig_mode': 2}
# Dynamic-reconfigure client for the projector/camera synchronizer.
projector = dynamic_reconfigure.client.Client('camera_synchronizer_node')
# 2 Hz loop -- the counter i below advances two beats per second.
r = rospy.Rate(120/60.)
#time.sleep(.2)
#r.sleep()
#projector_animation = ProjectorWakeUp()
# Beat counter; starts negative so the warm-up moves run before beat 0.
i = -23
# Choreography sequencer: each pass is one "beat"; specific beat numbers
# fire latched triggers (arm/head/base moves). Runs until beat 80 or shutdown.
while not rospy.is_shutdown():
    print '------------', i, '-------------'
    # Warm-up (negative beats): left arm to start poses, lights on, head scan.
    if i == -23:
        print 'left_initial_pose0'
        left_initial_pose0.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 1.5)), rospy.get_param("~event", ""))
    if i == -12:
        print 'left_initial_pose'
        left_initial_pose.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 2.0)), rospy.get_param("~event", ""))
    if i == -6:
        print 'arm_on'
        arm_on.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.3)), rospy.get_param("~event", ""))
    if i == -5:
        print 'head_look_around'
        head_look_around.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.2)), rospy.get_param("~event", ""))
    # Beats 9-49: blink the lights on an 8-beat cycle (off at 0, on at 4).
    if i >= 9 and i <= 49:
        arm_i = (i - 9)
        if arm_i % 8 == 0:
            print 'lights off'
            arm_off.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", .8)), rospy.get_param("~event", ""))
        if arm_i % 8 == 4:
            print 'lights on'
            arm_on.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", .8)), rospy.get_param("~event", ""))
    # Beats 15-34: nod the head on a 4-beat cycle (the "up" half-beat is a
    # no-op here; the down_up trigger performs both motions).
    if i >= 15 and i <= 34:
        head_i = i - 15
        if head_i % 4 == 0:
            #Down
            print 'down'
            head_down_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.1)), rospy.get_param("~event", ""))
            #head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
        if head_i % 4 == 2:
            #Up
            print 'up'
            #head_down.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
            #head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
            #hand_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
    # Beats 40-43: one extra short nod phrase, offset by one beat.
    if i >= 40 and i <= 43:
        head_i = i - 41
        if head_i % 4 == 0:
            head_down_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.1)), rospy.get_param("~event", ""))
            print 'down'
        if head_i % 4 == 2:
            print 'up'
            #head_up.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
    ################################################################################
    ### FREESTYLE
    ################################################################################
    if i == 23:
        print 'right_initial_pose0'
        right_initial_pose0.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.3)), rospy.get_param("~event", ""))
    #if i == 24:
    #    print 'right_initial_pose0'
    #    right_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.5)), rospy.get_param("~event", ""))
    if i == 26:
        #if i == 29:
        print 'arm_spin'
        arm_spin.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.0)), rospy.get_param("~event", ""))
    #if i >= 29 and i < 37:
    #    if ((i-29) % 9) == 0:
    #        print 'Free style!'
    #        froo_froo.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 2.0)), rospy.get_param("~event", ""))
    if i == 42:
        #if i == 45:
        #ANOTHER FREESTYLE
        print 'raise_the_roof'
        raise_the_roof.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.0)), rospy.get_param("~event", ""))
    ###############################################################################
    ## Dancy
    ###############################################################################
    if i == 53:
        print 'head_down'
        print 'both_arms_forward2'
        both_arms_forward2.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 2)), rospy.get_param("~event", ""))
    if i == 56:
        # Drive back and to the left while looking down.
        p2d = gm.Pose2D()
        p2d.x = -.4
        p2d.y = .15
        move_base.publish(p2d)
        head_down_full.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.5)), rospy.get_param("~event", ""))
    if i == 61:
        print 'head_up'
        print 'both_arms_fold2'
        head_up_full.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.4)), rospy.get_param("~event", ""))
        both_arms_fold2.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
    if i == 65:
        # Deliberately huge y/theta request -- presumably saturated by the
        # base controller into a spin. TODO(review): confirm simple_move_base
        # clamps these values.
        p2d = gm.Pose2D()
        p2d.y = 100.
        p2d.theta = -390
        move_base.publish(p2d)
    if i == 77:
        print 'both_arms_fold_end_pose'
        print 'head_turn'
        head_turn.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 7.0)), rospy.get_param("~event", ""))
        both_arms_fold_end_pose.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.8)), rospy.get_param("~event", ""))
    i = i+1
    r.sleep()
    # Routine ends after beat 80.
    if i == 80:
        break
# Make sure the projector is off when the routine finishes.
projector.update_configuration(projector_off)
#if i == -12:
# left_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 5)), rospy.get_param("~event", ""))
# right_initial_pose00.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 5)), rospy.get_param("~event", ""))
#if i == 43:
# right_initial_pose.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 3.)), rospy.get_param("~event", ""))
| [
[
1,
0,
0.0042,
0.0042,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0083,
0.0042,
0,
0.66,
0.0294,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0125,
0.0042,
0,
0.... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import dynamic_reconfigure.client",
"import time",
"import trigger_msgs.msg",
"import geometry_msgs.msg as gm",
"rospy.init_node(\"trigger\", anonymous=True)",
"head_up = rospy.Publisher(\"head_up\", trigger_ms... |
import roslib
roslib.load_manifest('trigger_msgs')
import sys
import rospy
import cv
import trigger_msgs.msg
rospy.init_node("trigger", anonymous=True)
# Latched triggers for arm and head motion playback.
arm_trigger = rospy.Publisher("arm_trigger", trigger_msgs.msg.Trigger, latch = True)
head_trigger = rospy.Publisher("head_trigger", trigger_msgs.msg.Trigger, latch = True)
# Tiny OpenCV window only so cv.WaitKey can receive keyboard focus/events.
cv.NamedWindow('keyboard', 1)
img = cv.CreateImage((30, 30), cv.IPL_DEPTH_8U, 1)
#r = rospy.Rate(132/60.)
r = rospy.Rate(10.)
i = 0
# Poll the keyboard: 'h' fires the head trigger, 'a' fires the arm trigger.
while not rospy.is_shutdown():
    cv.ShowImage('keyboard', img)
    k = cv.WaitKey(10)
    #print (k & 0xff), k
    # Mask to the low byte -- WaitKey can set modifier bits above it.
    if chr(k & 0xff) == 'h':
        print 'head!'
        head_trigger.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
    if chr(k & 0xff) == 'a':
        print 'arm!'
        arm_trigger.publish(rospy.get_rostime() + rospy.Duration(rospy.get_param("~delay", 0.0)), rospy.get_param("~event", ""))
    #if i % 4 == 0:
    #if i % 8 == 0:
    #i = i+1
    #r.sleep()
| [
[
1,
0,
0.0286,
0.0286,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0571,
0.0286,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0857,
0.0286,
0,
0.... | [
"import roslib",
"roslib.load_manifest('trigger_msgs')",
"import sys",
"import rospy",
"import cv",
"import trigger_msgs.msg",
"rospy.init_node(\"trigger\", anonymous=True)",
"arm_trigger = rospy.Publisher(\"arm_trigger\", trigger_msgs.msg.Trigger, latch = True)",
"head_trigger = rospy.Publisher(\"h... |
#!/usr/bin/env python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
import rospy
from laser_camera_segmentation.srv._Segmentation import *
import sensor_msgs
from laser_camera_segmentation.ROS_interface_helper_functions import *
import laser_camera_segmentation.processor as processor
import laser_camera_segmentation.configuration as configuration
from labeling.scan_dataset import scan_dataset
def segment_pointcloud(request):
    """ROS service handler: classify the points of the request's point cloud.

    Converts the ROS point cloud, camera image, and 2D region of interest to
    the processor's native types, configures a 'dummyScanner' processor from
    the request, extracts features, runs the pre-trained 'all_post'
    classifier, and returns the labeled (bounded) cloud in a
    SegmentationResponse.
    """
    #convert data from ROS
    pts3d, intensities, labels = convert_ROS_pointcloud_to_pointcloud(request.pointcloud)
    cvimage = convert_ROS_image_to_cvimage(request.image, request.imageWidth, request.imageHeight)
    polygon = convert_ROS_polygon_to_polygon(request.regionOfInterest2D)
    # The ROI polygon marks the supporting surface for the classifier.
    polygon.set_label('surface')
    print polygon, polygon.get_points()
    #create processor and configuration
    #cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/ROS_server_test', 'codyRobot')
    cfg = configuration.configuration('../data/ROS_server', 'dummyScanner')
    pc = processor.processor(cfg)
    pc.scan_dataset = scan_dataset()
    pc.scan_dataset.image_artag_filename = ''
    # Plane setup: laser height above ground comes from the request.
    # NOTE(review): 'np' is used here but no 'import numpy as np' is visible
    # in this file's import block -- confirm it exists upstream.
    pc.scan_dataset.table_plane_translation = np.matrix([0,0,0]).T
    pc.scan_dataset.ground_plane_translation = np.matrix([0,0,request.laserHeightAboveGround]).T
    pc.scan_dataset.ground_plane_rotation = ''
    #pc.scan_dataset.is_test_set = True
    pc.scan_dataset.is_labeled = True
    pc.scan_dataset.id = 'ROStest'
    pc.img = cvimage
    pc.image_angle = request.imageAngle
    pc.pts3d = pts3d
    pc.intensities = intensities
    pc.scan_indices = np.zeros(len(intensities))
    pc.scan_dataset.polygons.append(polygon)
    pc.do_all_point_cloud()
    pc.do_polygon_mapping()
    pc.scan_dataset.ground_plane_normal = np.matrix([0.,0.,1.]).T
    # -1 means "classify every point": use an effectively unbounded count.
    if request.numberOfPointsToClassify == -1:
        n = 999999999999
    else:
        n = request.numberOfPointsToClassify
    feature_data = pc.generate_features(n, False, True)
    labels, testresults = pc.load_classifier_and_test_on_dataset('all_post', feature_data)
    response = SegmentationResponse()
    response.pointcloud = convert_pointcloud_to_ROS(pc.pts3d_bound, pc.intensities_bound, labels)
    return response
def segmentation_server():
    """Bring up the 'segment_pointcloud' ROS service and block until shutdown."""
    rospy.init_node('segmentation_server')
    # Keep a reference to the service object for the lifetime of the node.
    service = rospy.Service('segment_pointcloud', Segmentation, segment_pointcloud)
    print("Ready to segment pointclouds!")
    rospy.spin()
# Script entry point: start the segmentation service node.
if __name__ == "__main__":
    segmentation_server()
[
1,
0,
0.3131,
0.0101,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.3131,
0.0101,
0,
0.66,
0.0909,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.3333,
0.0101,
0,
0.... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import rospy",
"from laser_camera_segmentation.srv._Segmentation import *",
"import sensor_msgs",
"from laser_camera_segmentation.ROS_interface_helper_functions import *... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
class label_object(object):
    """A labeled annotation: an ordered list of points plus a label string.

    The label defaults to 'object'; the geometric type is derived from the
    label (edge labels are lines, everything else is a polygon).
    """

    def __init__(self):
        # Fresh annotation: no points yet, generic default label.
        self.points = []
        self.label = 'object'

    def get_points(self):
        """Return the (mutable) list of points."""
        return self.points

    def add_point(self, x):
        """Append one point to the annotation."""
        self.points.append(x)

    def set_label(self, label):
        """Replace the label string."""
        self.label = label

    def get_label(self):
        """Return the current label string."""
        return self.label

    def get_type(self):
        """Return 'line' for edge labels, 'polygon' for everything else.

        Derived from self.label; the value stored by set_type() is not
        consulted here.
        """
        if self.label in ('edge', 'edge_up', 'edge_down'):
            return 'line'
        return 'polygon'

    def set_type(self, type):
        # Stored but never read back by get_type(); kept for compatibility.
        self.type = type

    def delete_last_point(self):
        """Remove the most recently added point."""
        self.points.pop()

    def is_empty(self):
        """Return True when the annotation has no points."""
        return not self.points
[
3,
0,
0.7164,
0.5821,
0,
0.66,
0,
589,
0,
9,
0,
0,
186,
0,
3
],
[
2,
1,
0.5149,
0.0896,
1,
0.68,
0,
555,
0,
1,
0,
0,
0,
0,
0
],
[
8,
2,
0.5075,
0.0448,
2,
0.85,
... | [
"class label_object(object):\n\n\n def __init__(self):\n '''\n Constructor\n '''\n self.points = []",
" def __init__(self):\n '''\n Constructor\n '''\n self.points = []\n self.label = 'object'",
" '''\n Constructor\n '''",... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from labeling import label_object, scan_dataset
import hrl_lib.util as ut
import shutil #file operations
class scans_database(object):
    """Ordered collection of scan datasets with a movable cursor.

    Wraps a list of dataset objects plus ``current_index``, the cursor used
    by the first/last/prev/next navigation helpers. Persistence goes through
    hrl_lib.util pickle helpers, with a timestamped backup before each save.
    """

    def __init__(self):
        self.datasets = []
        self.current_index = 0

    def load(self, path, filename):
        """Load the pickled dataset list from path/filename.

        NOTE(review): no error handling -- a missing or corrupt pickle
        raises, and the file will be overwritten by the next save().
        """
        self.filename = filename
        self.path = path
        contents = ut.load_pickle(self.path + '/' + self.filename)
        self.datasets = contents['datasets']

    def save(self):
        """Back up the existing database file, then overwrite it."""
        payload = {'datasets': self.datasets, 'version': 0.1}
        database_filename = self.path + '/' + self.filename
        backup_filename = database_filename + '_backup_' + ut.formatted_time()
        print('Backing up old database to ' + backup_filename)
        shutil.copy(database_filename, backup_filename)
        print("Saving: " + database_filename)
        ut.save_pickle(payload, database_filename)

    def get_path(self):
        """Return the directory this database was loaded from."""
        return self.path

    def get_dataset(self, index):
        """Move the cursor to *index* and return the dataset there."""
        self.current_index = index
        return self.datasets[index]

    def get_dataset_by_id(self, id):
        """Return the first dataset whose .id matches, or False."""
        #TODO: faster lookup, probably using a dictionary instead of a list?
        for candidate in self.datasets:
            if candidate.id == id:
                return candidate
        return False

    def set_internal_pointer_to_dataset(self, id):
        """Point the cursor at the dataset with *id*; True on success."""
        self.current_index = 0
        for candidate in self.datasets:
            if candidate.id == id:
                return True
            self.current_index += 1
        return False

    def get_next_dataset(self):
        """Advance the cursor and return that dataset, or False at the end."""
        if self.current_index >= len(self.datasets) - 1:
            return False
        self.current_index += 1
        return self.datasets[self.current_index]

    def get_prev_dataset(self):
        """Step the cursor back and return that dataset, or False at the start."""
        if self.current_index <= 0:
            return False
        self.current_index -= 1
        return self.datasets[self.current_index]

    def get_first_dataset(self):
        """Cursor to the first dataset and return it, or False when empty."""
        if not self.datasets:
            return False
        self.current_index = 0
        return self.datasets[0]

    def get_last_dataset(self):
        """Cursor to the last dataset and return it, or False when empty."""
        if not self.datasets:
            return False
        self.current_index = len(self.datasets) - 1
        return self.datasets[self.current_index]

    def get_count(self):
        """Return the number of datasets."""
        return len(self.datasets)

    def add_dataset(self, dataset):
        """Append a dataset to the collection."""
        self.datasets.append(dataset)

    def delete_current_dataset(self):
        """Delete the dataset under the cursor and return a neighbor."""
        del self.datasets[self.current_index]
        neighbor = self.get_prev_dataset()
        if neighbor != False:
            return neighbor
        # No previous dataset: fall forward instead.
        return self.get_next_dataset()  #TODO: still fails if there is only one dataset!

    def add_attribute_to_every_dataset(self, name):
        """Add key *name* (initialized to '') to every dataset's dict."""
        for dataset in self.datasets:
            dataset.dict[name] = ''
| [
[
1,
0,
0.2069,
0.0069,
0,
0.66,
0,
948,
0,
2,
0,
0,
948,
0,
0
],
[
1,
0,
0.2138,
0.0069,
0,
0.66,
0.3333,
775,
0,
1,
0,
0,
775,
0,
0
],
[
1,
0,
0.2276,
0.0069,
0,
... | [
"from labeling import label_object, scan_dataset",
"import hrl_lib.util as ut",
"import shutil #file operations",
"class scans_database(object):\n '''\n classdocs\n '''\n\n\n def __init__(self):\n '''",
" '''\n classdocs\n '''",
" def __init__(self):\n '''\n Co... |
#!/usr/bin/python
import roslib; roslib.load_manifest('laser_camera_segmentation')
print 'TEST script!!!'
# Import Psyco if available
try:
import psyco
psyco.full()
print "Psyco loaded"
except ImportError:
pass
import laser_camera_segmentation.processor as processor
import laser_camera_segmentation.configuration as configuration
import time
def getTime():
    """Return the current local time formatted as '[HH:MM:SS]' for log lines."""
    # Brackets are literal characters in the strftime format string.
    return time.strftime("[%H:%M:%S]", time.localtime())
def generate_train_save():
    """Train and save classifiers for the module-level dataset.

    Relies on the module globals ``pc`` (processor) and ``id`` (dataset id).
    Feature generation is commented out -- this assumes features were
    already generated and saved for the dataset.
    """
    #pc.load_data(id)
    #print getTime(), 'generate and save features...'
    #pc.generate_save_features(True, True)
    pc.load_data(id)
    print getTime(), 'train_and_save_Classifiers...'
    pc.train_and_save_Classifiers()
def print_test():
    """Log the processor's current feature parameters (type, k, radius)."""
    print getTime(), 'testing:',pc.feature_type,'k=',pc.feature_neighborhood,'r=',pc.feature_radius
print getTime(), 'start'
# Processor configured against the local labeling data directory.
cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
#sc = scanner.scanner(cfg)
pc = processor.processor(cfg)
#generate all features and train Classifiers
# Timestamped id of the scan to test against.
id = '2009Oct30_162400'
#pc.classifier_training_size = 1000000000
#pc.feature_neighborhood = 20
#pc.feature_radius = 0.03
#pc.feature_type = 'gaussian_histograms'
#print_test()
#generate_train_save()
#pc.load_Classifiers()
#pc.test_classifiers_on_testset()
#pc.update_test_postprocessing_on_testset()
# Run the saved 'all_post' classifier on the dataset and report results.
labels, testresults = pc.load_classifier_and_test_on_dataset('all_post', id)
print 'testresults', testresults
import numpy as np
print np.shape(labels)
print labels
import sys
#sys.exit()
print getTime(), 'done'
| [
[
1,
0,
0.0274,
0.0137,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0274,
0.0137,
0,
0.66,
0.05,
630,
3,
1,
0,
0,
0,
0,
1
],
[
8,
0,
0.0548,
0.0137,
0,
0.66... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"print('TEST script!!!')",
"try:\n import psyco\n psyco.full()\n print(\"Psyco loaded\")\nexcept ImportError:\n pass",
" import psyco",
" psyco.full()",... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from labeling import label_object
class scan_dataset(object):
    """One labeled scan record backed by a single dictionary.

    Every attribute lives in ``self.dict`` so the record can be pickled as
    a plain dict; attribute reads and writes are redirected there via
    __setattr__/__getattr__. ``dict`` itself is the only real instance
    attribute.
    """

    def __init__(self):
        # Assigning 'dict' goes through __setattr__'s special case below.
        self.dict = {
            'title': '',
            'id': '',
            'polygons': [label_object.label_object()],
            'scan_filename': '',
            'image_filename': '',
            'image_artag_filename': '',
            'surface_id': '',
            'surface_height': '',
            'camera_height': '',
            'camera_angle': '',
            'surface_type': '',
            'ground_plane_normal': '',
            'ground_plane_three_points': '',
            'is_training_set': False,
            'is_test_set': False,
            'is_labeled': False,
        }

    def __setattr__(self, name, value):
        # Only the backing store is a real attribute; everything else is
        # routed into self.dict.
        if name == 'dict':
            object.__setattr__(self, name, value)
        else:
            self.dict[name] = value

    def __getattr__(self, name):
        # Invoked only when normal attribute lookup fails: consult the
        # backing store, otherwise fall back (raising AttributeError).
        if name != 'dict' and name in self.dict:
            return self.dict[name]
        return object.__getattribute__(self, name)
| [
[
1,
0,
0.3973,
0.0137,
0,
0.66,
0,
948,
0,
1,
0,
0,
948,
0,
0
],
[
3,
0,
0.7055,
0.5753,
0,
0.66,
1,
727,
0,
3,
0,
0,
186,
0,
3
],
[
8,
1,
0.4521,
0.0411,
1,
0.79,... | [
"from labeling import label_object",
"class scan_dataset(object):\n '''\n classdocs\n '''\n\n\n def __init__(self):\n '''",
" '''\n classdocs\n '''",
" def __init__(self):\n '''\n Constructor\n '''\n self.dict = {}\n self.dict['title'] = ''\n ... |
#!/usr/bin/python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
from opencv.highgui import cvLoadImage
import sys
import opencv.cv as cv
import opencv.highgui as hg
from PyQt4 import QtGui, QtCore
import shutil #file operations
import os
from dumpObj import dumpObj
from labeling import label_object, scan_dataset, scans_database
#take scans:
import laser_camera_segmentation.scanner as scanner
import laser_camera_segmentation.processor as processor
import laser_camera_segmentation.configuration as configuration
import hrl_lib.util as ut
class labeling_tool(QtGui.QWidget):
    """Main window of the scan-labeling GUI.

    Holds the scans database, the currently displayed dataset, and lazily
    created scanner/processor objects.  The left side shows the image with
    polygon overlays (a ``draw_widget``); the right side holds one label
    combobox per polygon.
    """
    # Placeholder class attributes; real values are set per-instance in __init__.
    draw_widget = None
    display_mode = 'image'          # 'image' | 'intensities' | 'features' | 'labels'
    display_3d_type = 'height'      # which channel the 3D views render

    def __init__(self, path, parent=None):
        """Build all widgets, wire signals, and show the newest dataset.

        path -- directory holding 'database.pkl' and the data/ files.
        """
        # Guard flag: slots consult this to ignore programmatic widget
        # changes while the GUI is being (re)populated.
        self.init_in_progress = True
        self.path = path
        # load configs for taking scans, etc:
        self.config = configuration.configuration(path)
        #create scanner and processor when needed:
        # False doubles as "not created yet"; replaced lazily by the slots.
        self.scanner = False
        self.processor = False
        # load database:
        self.scans_database = scans_database.scans_database()
        self.scans_database.load(path,'database.pkl')
        #get first dataset:
        self.current_dataset = self.scans_database.get_dataset(0)
        QtGui.QWidget.__init__(self, parent)
        self.setWindowTitle('labeling tool')
        left_layout = QtGui.QVBoxLayout()
        self.draw_widget = draw_widget(self.current_dataset.polygons, self.scans_database.get_path() + '/' + self.current_dataset.image_filename, self)
        # --- first (title) row: scan/import buttons and view switches ---
        title_layout = QtGui.QHBoxLayout()
        take_scan_button = QtGui.QPushButton('Scan')
        take_scan_button.setMaximumWidth(50)
        title_layout.addWidget(take_scan_button)
        self.connect(take_scan_button, QtCore.SIGNAL('clicked()'), self.slot_take_scan )
        take_artag_image_button = QtGui.QPushButton('ARTag')
        take_artag_image_button.setMaximumWidth(50)
        title_layout.addWidget(take_artag_image_button)
        self.connect(take_artag_image_button, QtCore.SIGNAL('clicked()'), self.slot_take_artag_image )
        button = QtGui.QPushButton('Import Img')
        title_layout.addWidget(button)
        self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_import_image )
        label = QtGui.QLabel("View: ")
        title_layout.addWidget(label)
        self.display_3d_button = QtGui.QPushButton('3D')
        self.display_3d_button.setMaximumWidth(40)
        title_layout.addWidget(self.display_3d_button)
        self.connect(self.display_3d_button, QtCore.SIGNAL('clicked()'), self.slot_display_3d )
        # Combobox choosing what the 3D views color points by; the string
        # in the QVariant is the value stored into self.display_3d_type.
        combobox = QtGui.QComboBox()
        combobox.addItem("Height", QtCore.QVariant("height"))
        combobox.addItem("Intensities", QtCore.QVariant("intensities"))
        #combobox.addItem("objects", QtCore.QVariant("objects"))
        combobox.addItem("Labels", QtCore.QVariant("labels"))
        combobox.addItem("Classifier range", QtCore.QVariant("range"))
        combobox.addItem("Classifier color", QtCore.QVariant("color"))
        combobox.addItem("Classifier all", QtCore.QVariant("all"))
        combobox.addItem("Classifier all+post", QtCore.QVariant("all_post"))
        combobox.addItem("Baseline algo", QtCore.QVariant("baseline"))
        combobox.addItem("h", QtCore.QVariant("h"))
        combobox.addItem("s", QtCore.QVariant("s"))
        combobox.addItem("v", QtCore.QVariant("v"))
        self.connect(combobox, QtCore.SIGNAL('currentIndexChanged(int)'), self.slot_update_display_3d_type)
        title_layout.addWidget(combobox)
        self.display_3d_type_combobox = combobox;
        self.display_3d_spheres_button = QtGui.QPushButton('3D_Spheres')
        title_layout.addWidget(self.display_3d_spheres_button)
        self.connect(self.display_3d_spheres_button, QtCore.SIGNAL('clicked()'), self.slot_display_3d_spheres )
        self.display_intensity_button = QtGui.QPushButton('Intensity')
        self.display_intensity_button.setMaximumWidth(50)
        title_layout.addWidget(self.display_intensity_button)
        self.connect(self.display_intensity_button, QtCore.SIGNAL('clicked()'), self.slot_display_intensity )
        self.display_features_button = QtGui.QPushButton('Features')
        title_layout.addWidget(self.display_features_button)
        self.display_features_button.setMaximumWidth(50)
        self.connect(self.display_features_button, QtCore.SIGNAL('clicked()'), self.slot_display_features )
        self.display_labels_button = QtGui.QPushButton('Labels')
        title_layout.addWidget(self.display_labels_button)
        self.display_labels_button.setMaximumWidth(50)
        self.connect(self.display_labels_button, QtCore.SIGNAL('clicked()'), self.slot_display_labels )
        self.display_stats_button = QtGui.QPushButton('Stats')
        title_layout.addWidget(self.display_stats_button)
        self.display_stats_button.setMaximumWidth(50)
        self.connect(self.display_stats_button, QtCore.SIGNAL('clicked()'), self.slot_display_stats )
        self.display_global_stats_button = QtGui.QPushButton('Global Stats')
        title_layout.addWidget(self.display_global_stats_button)
        self.display_global_stats_button.setMaximumWidth(50)
        self.connect(self.display_global_stats_button, QtCore.SIGNAL('clicked()'), self.slot_display_global_stats )
        # (line_edit, dataset-field-name) pairs; see add_line_edit /
        # slot_line_edit_changed / load_values_from_dataset.
        self.line_edits = []
        self.add_line_edit('Title:',title_layout,'title')
        # Dataset navigation: first / prev / next / last.
        first_dataset_button = QtGui.QPushButton('<<')
        first_dataset_button.setMaximumWidth(30)
        title_layout.addWidget(first_dataset_button)
        self.connect(first_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_first_dataset )
        prev_dataset_button = QtGui.QPushButton('<')
        prev_dataset_button.setMaximumWidth(30)
        title_layout.addWidget(prev_dataset_button)
        self.connect(prev_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_prev_dataset )
        next_dataset_button = QtGui.QPushButton('>')
        next_dataset_button.setMaximumWidth(30)
        title_layout.addWidget(next_dataset_button)
        self.connect(next_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_next_dataset )
        last_dataset_button = QtGui.QPushButton('>>')
        last_dataset_button.setMaximumWidth(30)
        title_layout.addWidget(last_dataset_button)
        self.connect(last_dataset_button, QtCore.SIGNAL('clicked()'), self.slot_last_dataset )
        save_button = QtGui.QPushButton('Save')
        title_layout.addWidget(save_button)
        save_button.setMaximumWidth(50)
        self.connect(save_button, QtCore.SIGNAL('clicked()'), self.slot_save )
        delete_button = QtGui.QPushButton('Delete')
        title_layout.addWidget(delete_button)
        delete_button.setMaximumWidth(50)
        self.connect(delete_button, QtCore.SIGNAL('clicked()'), self.slot_delete )
        # Custom (old-style, string-named) signals emitted by draw_widget.
        self.connect(self.draw_widget, QtCore.SIGNAL('sigPolyChanged'), self.slot_update_polygons)
        self.connect(self.draw_widget, QtCore.SIGNAL('sigPolyLabelChanged'), self.slot_update_polygon_label)
        self.connect(self.draw_widget, QtCore.SIGNAL('sigDefineGroundPlane'), self.slot_define_ground_plane)
        left_layout.addLayout(title_layout)
        #second row:
        # --- second row: dataset id and surface/camera metadata fields ---
        row2_layout = QtGui.QHBoxLayout()
        left_layout.addLayout(row2_layout)
        label = QtGui.QLabel("Id:")
        row2_layout.addWidget(label)
        self.id_label = QtGui.QLabel("")
        row2_layout.addWidget(self.id_label)
        self.add_line_edit('Surface: ID:',row2_layout,'surface_id')
        self.add_line_edit('Height',row2_layout,'surface_height')
        label = QtGui.QLabel("Type: ")
        row2_layout.addWidget(label)
        combobox = QtGui.QComboBox()
        combobox.addItem("Table Office", QtCore.QVariant("table_office"))
        combobox.addItem("Table Dorm", QtCore.QVariant("table_dorm"))
        combobox.addItem("Table House", QtCore.QVariant("table_house"))
        combobox.addItem("Shelf Office", QtCore.QVariant("shelf_office"))
        combobox.addItem("Shelf Dorm", QtCore.QVariant("shelf_dorm"))
        combobox.addItem("Shelf House", QtCore.QVariant("shelf_house"))
        self.connect(combobox, QtCore.SIGNAL('currentIndexChanged(int)'), self.slot_update_surface_type)
        row2_layout.addWidget(combobox)
        self.surface_type_combobox = combobox;
        self.add_line_edit('Camera: Height:',row2_layout,'camera_height')
        self.add_line_edit('Camera: Angle:',row2_layout,'camera_angle')
        #####################################
        #thrid row:
        # --- third row: train/test flags and classifier actions ---
        row3_layout = QtGui.QHBoxLayout()
        left_layout.addLayout(row3_layout)
        #checkboxes:
        button = QtGui.QPushButton("&gen'n'save features")
        row3_layout.addWidget(button)
        self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_generate_save_features )
        checkbox = QtGui.QCheckBox('&Training Set')
        row3_layout.addWidget(checkbox)
        self.connect(checkbox, QtCore.SIGNAL('stateChanged(int)'), self.slot_update_training_set)
        self.checkbox_training_set = checkbox
        checkbox = QtGui.QCheckBox('Te&st Set')
        row3_layout.addWidget(checkbox)
        self.connect(checkbox, QtCore.SIGNAL('stateChanged(int)'), self.slot_update_test_set)
        self.checkbox_test_set = checkbox
        checkbox = QtGui.QCheckBox('Labels, Groundp. checked')
        row3_layout.addWidget(checkbox)
        self.connect(checkbox, QtCore.SIGNAL('stateChanged(int)'), self.slot_update_is_labeled)
        self.checkbox_is_labeled = checkbox
        button = QtGui.QPushButton("Train'n'save Classifiers (training set)")
        row3_layout.addWidget(button)
        self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_train_and_save_Classifiers )
        button = QtGui.QPushButton('Test Classifiers (on current)')
        row3_layout.addWidget(button)
        self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_test_Classifiers )
        button = QtGui.QPushButton('Test Classifiers (on testset)')
        row3_layout.addWidget(button)
        self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_test_Classifiers_on_testset )
        button = QtGui.QPushButton('Load Classifiers')
        row3_layout.addWidget(button)
        self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_load_Classifiers )
        # button = QtGui.QPushButton('Save Classifier')
        # row3_layout.addWidget(button)
        # self.connect(button, QtCore.SIGNAL('clicked()'), self.slot_save_Classifier )
        #####################################
        left_layout.addWidget(self.draw_widget)
        # Right column: one label combobox per polygon (grown on demand).
        self.right_layout = QtGui.QVBoxLayout()
        self.right_layout.setAlignment(QtCore.Qt.AlignTop)
        self.outer_layout = QtGui.QHBoxLayout()
        self.outer_layout.addLayout(left_layout)
        self.outer_layout.addLayout(self.right_layout)
        self.polygon_comboboxes = []
        self.add_polygon_combobox()
        self.slot_update_polygons(self.current_dataset.polygons,0)
        self.setLayout(self.outer_layout)
        self.resize(900, 700)
        self.load_values_from_dataset()
        self.init_in_progress = False
        #at startup, display newest:
        self.slot_last_dataset()

    def slot_update_training_set(self, checkState):
        """Checkbox slot: mirror the 'Training Set' state into the dataset."""
        if checkState:
            self.current_dataset.is_training_set = True
        else:
            self.current_dataset.is_training_set = False

    def slot_update_test_set(self, checkState):
        """Checkbox slot: mirror the 'Test Set' state into the dataset."""
        if checkState:
            self.current_dataset.is_test_set = True
        else:
            self.current_dataset.is_test_set = False

    def slot_update_is_labeled(self, checkState):
        """Checkbox slot: mirror the 'labels checked' state into the dataset."""
        if checkState:
            self.current_dataset.is_labeled = True
        else:
            self.current_dataset.is_labeled = False

    def closeEvent(self, x):
        """Persist the database when the window is closed."""
        print "Exit: saving database..."
        self.slot_save()

    def slot_import_image(self):
        """Import an external .png as a new (image-only) dataset.

        Copies the chosen file into data/<timestamp>_image.png, appends a
        fresh scan_dataset to the database, and navigates to it.
        """
        fileName = QtGui.QFileDialog.getOpenFileName(self,"Open Image", self.path, "Image Files (*.png)")
        print "Import image into new dataset:" + fileName
        name = ut.formatted_time()
        new_dataset = scan_dataset.scan_dataset()
        new_dataset.id = name
        new_dataset.image_filename = 'data/'+name+'_image.png'
        shutil.copy(fileName,self.path+'/'+new_dataset.image_filename)
        self.scans_database.add_dataset(new_dataset)
        #proceed to new dataset:
        # Step forward until get_next_dataset() is exhausted (newest entry).
        while True == self.slot_next_dataset():
            pass

    def add_line_edit(self,label, layout, variable):
        """Add a labeled QLineEdit bound to dataset field *variable*.

        Registers the (widget, field-name) pair in self.line_edits and
        returns the created QLineEdit.
        """
        label = QtGui.QLabel(label)
        line_edit = QtGui.QLineEdit()
        line_edit.setMinimumWidth(80)
        self.line_edits.append((line_edit,variable))
        layout.addWidget(label)
        layout.addWidget(line_edit)
        self.connect(line_edit, QtCore.SIGNAL('textEdited (const QString&)'), self.slot_line_edit_changed )
        return line_edit

    def slot_line_edit_changed(self,text):
        """Copy every bound line-edit's text into the current dataset."""
        if True == self.init_in_progress:
            return
        # Writes all fields (not only the edited one) back into the record.
        for (line_edit, variable) in self.line_edits:
            self.current_dataset.dict[variable] = str(line_edit.text())

    def slot_next_dataset(self):
        """Advance to the next dataset; return True on success, else False."""
        dataset = self.scans_database.get_next_dataset()
        if False != dataset:
            self.current_dataset = dataset
            self.load_values_from_dataset()
            return True
        return False

    def slot_prev_dataset(self):
        """Go to the previous dataset; return True on success, else False."""
        dataset = self.scans_database.get_prev_dataset()
        if False != dataset:
            self.current_dataset = dataset
            self.load_values_from_dataset()
            return True
        return False

    def slot_first_dataset(self):
        """Jump to the first dataset; return True on success, else False."""
        dataset = self.scans_database.get_first_dataset()
        if False != dataset:
            self.current_dataset = dataset
            self.load_values_from_dataset()
            return True
        return False

    def slot_last_dataset(self):
        """Jump to the last dataset; return True on success, else False."""
        dataset = self.scans_database.get_last_dataset()
        if False != dataset:
            self.current_dataset = dataset
            self.load_values_from_dataset()
            return True
        return False

    def load_values_from_dataset(self):
        """Populate all widgets from self.current_dataset.

        Sets init_in_progress while updating so the change-slots do not
        write the programmatic values back into the record.
        """
        self.init_in_progress = True
        self.id_label.setText(self.current_dataset.id)
        for (line_edit, variable) in self.line_edits:
            line_edit.setText(self.current_dataset.dict[variable])
        # Select each polygon combobox entry matching the stored label.
        for index, box in enumerate(self.polygon_comboboxes):
            if index < len(self.current_dataset.polygons):
                print str(index) + " load label:" + self.current_dataset.polygons[index].get_label()
                boxindex = box.findData(QtCore.QVariant(self.current_dataset.polygons[index].get_label()))
                box.setCurrentIndex(boxindex)
            else: #set default to first:
                box.setCurrentIndex(0)
        box = self.surface_type_combobox
        boxindex = box.findData(QtCore.QVariant(self.current_dataset.surface_type))
        box.setCurrentIndex(boxindex)
        print self.current_dataset.is_training_set
        if self.current_dataset.is_training_set:
            self.checkbox_training_set.setCheckState(QtCore.Qt.Checked)
        else:
            self.checkbox_training_set.setCheckState(QtCore.Qt.Unchecked)
        if self.current_dataset.is_test_set:
            self.checkbox_test_set.setCheckState(QtCore.Qt.Checked)
        else:
            self.checkbox_test_set.setCheckState(QtCore.Qt.Unchecked)
        if self.current_dataset.is_labeled:
            self.checkbox_is_labeled.setCheckState(QtCore.Qt.Checked)
        else:
            self.checkbox_is_labeled.setCheckState(QtCore.Qt.Unchecked)
        #hide button if there is no 3d data:
        print self.current_dataset.scan_filename
        if '' == self.current_dataset.scan_filename:
            self.display_3d_button.setEnabled(False)
            self.display_3d_spheres_button.setEnabled(False)
            self.display_intensity_button.setEnabled(False)
        else:
            self.display_3d_button.setEnabled(True)
            self.display_3d_spheres_button.setEnabled(True)
            self.display_intensity_button.setEnabled(True)
        # Switching datasets always falls back to the plain image view.
        self.display_mode = 'image'
        self.draw_widget.set_polygons(self.current_dataset.polygons)
        self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)
        self.init_in_progress = False

    def slot_take_artag_image(self):
        """Capture an ARTag image for the current dataset and verify it."""
        # Lazily create scanner/processor (False means "not created yet").
        if False == self.scanner:
            self.scanner = scanner.scanner(self.config)
        if False == self.processor:
            self.processor = processor.processor(self.config)
        img = self.scanner.take_artag_image()
        self.current_dataset.image_artag_filename = self.scanner.save_artag_image(self.current_dataset.id)
        self.slot_save() #save for consistency with files
        if self.processor.read_artag(img).any():
            print "SUCCESS in reading ARTag"
        else:
            print "FAILURE in reading ARTag - try again!"

    def slot_take_scan(self):
        """Take a new scan; the scanner appends it to the on-disk database."""
        #save database, let scanner add dataset, reload it then
        self.slot_save()
        if False == self.scanner:
            self.scanner = scanner.scanner(self.config)
        if False == self.processor:
            self.processor = processor.processor(self.config)
        name = ut.formatted_time()
        self.scanner.capture_and_save(name)
        #self.processor.load_raw_data(name)
        #self.processor.load_metadata(name)
        #self.processor.process_raw_data()
        #self.processor.save_mapped_image(name)
        #self.processor.display_all_data()
        print 'scan ' + name + ' taken'
        # Reload to pick up the dataset the scanner just added on disk.
        self.scans_database.load(self.path,'database.pkl')
        #proceed to new scan:
        while True == self.slot_next_dataset():
            pass

    def slot_display_intensity(self):
        """Toggle between the intensity image and the normal camera image."""
        if self.display_mode != 'intensities':
            if False == self.processor:
                self.processor = processor.processor(self.config)
            #reset ground plane:
            # Entering intensity view restarts ground-plane picking from scratch.
            self.current_dataset.ground_plane_normal = ''
            self.current_dataset.ground_plane_three_points = ''
            self.slot_save()
            self.processor.load_data(self.current_dataset.id)
            self.processor.process_intensities()
            filename = self.processor.save_intensity_image(self.current_dataset.id)
            #self.processor.display_intensities()
            self.display_mode = 'intensities'
            self.draw_widget.set_image(filename)
        else:
            #display normal image
            self.display_mode = 'image'
            self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)

    def slot_display_features(self):
        """Toggle feature-inspection view (intensity image, click => featurevector)."""
        if self.display_mode != 'features':
            if False == self.processor:
                self.processor = processor.processor(self.config)
            self.processor.load_data(self.current_dataset.id)
            self.processor.process_intensities()
            filename = self.processor.save_intensity_image(self.current_dataset.id)
            self.display_mode = 'features'
            self.draw_widget.set_image(filename)
        else:
            #display normal image
            self.display_mode = 'image'
            self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)

    def slot_display_labels(self):
        """Toggle the rendered-labels view for the current display_3d_type."""
        if self.display_mode != 'labels':
            if False == self.processor:
                self.processor = processor.processor(self.config)
            self.processor.load_data(self.current_dataset.id)
            self.processor.process_labels(self.display_3d_type)
            filename = self.processor.save_labels_image(self.display_3d_type)
            self.draw_widget.set_image(filename)
            self.display_mode = 'labels'
        else:
            #display normal image
            self.draw_widget.set_image(self.scans_database.get_path() + '/' + self.current_dataset.image_filename)
            self.display_mode = 'image'

    def slot_display_stats(self):
        """Show statistics for the current dataset."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        self.processor.load_data(self.current_dataset.id)
        self.processor.display_stats()

    def slot_display_global_stats(self):
        """Show statistics aggregated over all datasets."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        self.processor.load_data(self.current_dataset.id)
        self.processor.display_stats(True)

    def slot_display_3d_spheres(self):
        """Convenience wrapper: 3D view rendered with spheres."""
        self.slot_display_3d(True)

    def slot_display_3d(self, spheres = False):
        """Open the 3D point-cloud view (optionally sphere-rendered)."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        #save data first so the processor can load it:
        self.slot_save()
        self.processor.load_data(self.current_dataset.id)
        #self.processor.create_polygon_images()
        self.processor.process_raw_data()
        #pc.save_mapped_image(name)
        self.processor.display_3d(self.display_3d_type, spheres)

    def slot_train_and_save_Classifiers(self):
        """Train classifiers on the training set and persist them."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        #save data first so the processor can load it:
        self.slot_save()
        self.processor.load_data(self.current_dataset.id)
        self.processor.train_and_save_Classifiers()

    def slot_generate_save_features(self):
        """Generate and persist feature vectors for the current dataset."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        #save data first so the processor can load it:
        self.slot_save()
        self.processor.load_data(self.current_dataset.id)
        self.processor.generate_save_features()

    def slot_test_Classifiers(self):
        """(Re)train classifiers, then evaluate them on the current dataset."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        self.slot_save() #save data first so the processor can load it:
        self.processor.load_data(self.current_dataset.id)
        self.processor.train_and_save_Classifiers()
        self.processor.test_Classifiers()

    def slot_test_Classifiers_on_testset(self):
        """(Re)train classifiers, then evaluate them on the whole test set."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        self.slot_save() #save data first so the processor can load it:
        self.processor.load_data(self.current_dataset.id)
        self.processor.train_and_save_Classifiers()
        self.processor.test_classifiers_on_testset()

    def slot_load_Classifiers(self):
        """Load previously saved classifiers into the processor."""
        if False == self.processor:
            self.processor = processor.processor(self.config)
        self.processor.load_Classifiers()

    def slot_save_Classifier(self):
        """Persist the processor's classifier (requires an existing processor)."""
        if False == self.processor:
            print 'ERROR: no processor object exists -> no Classifier to save!'
            return
        self.processor.save_Classifier()

    def add_polygon_combobox(self):
        """Append one polygon-label combobox to the right-hand column."""
        combobox = QtGui.QComboBox()
        combobox.addItem("Object", QtCore.QVariant("object"))
        combobox.addItem("Surface", QtCore.QVariant("surface"))
        combobox.addItem("Region of Interest (ROI)", QtCore.QVariant("roi"))
        combobox.addItem("Background", QtCore.QVariant("background"))
        combobox.addItem("Visible Surface-Edge", QtCore.QVariant("edge"))
        combobox.addItem("Wall-Surface-Edge", QtCore.QVariant("edge_up"))
        combobox.addItem("Downward-Surface-Edge", QtCore.QVariant("edge_down"))
        combobox.setCurrentIndex(0)
        self.connect(combobox, QtCore.SIGNAL('currentIndexChanged(int)'), self.slot_update_polygon_labels)
        self.polygon_comboboxes.append(combobox)
        self.right_layout.addWidget(combobox, QtCore.Qt.AlignTop)
        self.slot_update_polygon_labels()

    def slot_delete(self):
        """Delete the current dataset's files and its database record."""
        #delete scan-files:
        # NOTE(review): isfile() is given the bare relative filename while
        # remove() gets self.path + '/' + filename — these agree only when
        # the process CWD equals self.path; verify before relying on it.
        if os.path.isfile(self.current_dataset.scan_filename):
            os.remove(self.path + '/' + self.current_dataset.scan_filename);
        if os.path.isfile(self.current_dataset.image_filename):
            os.remove(self.path + '/' + self.current_dataset.image_filename);
        if os.path.isfile(self.current_dataset.image_artag_filename):
            os.remove(self.path + '/' + self.current_dataset.image_artag_filename);
        #delete metadata
        self.current_dataset = self.scans_database.delete_current_dataset()
        self.load_values_from_dataset()
        self.slot_save() #save for consistency with files

    def slot_save(self):
        """Dump the current record (debug) and save the whole database."""
        dumpObj(self.current_dataset)
        #for poly in self.draw_widget.get_polygons():
        #    dumpObj(poly)
        #self.slot_update_polygons(self.draw_widget.get_polygons(), 1)
        self.scans_database.save()

    def slot_update_surface_type(self):
        """Combobox slot: store the selected surface type in the dataset."""
        if True == self.init_in_progress:
            return
        box = self.surface_type_combobox
        self.current_dataset.surface_type = str(box.itemData(box.currentIndex()).toString())

    def slot_update_display_3d_type(self):
        """Combobox slot: remember which channel the 3D views should show."""
        if True == self.init_in_progress:
            return
        box = self.display_3d_type_combobox
        self.display_3d_type = str(box.itemData(box.currentIndex()).toString())

    def slot_update_polygon_label(self, index, label):
        """From draw_widget keyboard shortcuts: select *label* for polygon *index*."""
        if True == self.init_in_progress:
            return
        box = self.polygon_comboboxes[index]
        boxindex = box.findData(QtCore.QVariant(label))
        box.setCurrentIndex(boxindex)
        self.draw_widget.update()

    def slot_update_polygon_labels(self):
        """Write every combobox's selected label back into its polygon."""
        if True == self.init_in_progress:
            return
        for index, box in enumerate(self.polygon_comboboxes):
            if index < len(self.current_dataset.polygons):
                self.current_dataset.polygons[index].set_label(str(box.itemData(box.currentIndex()).toString()))
                print str(index) + " xx " + str(box.itemData(box.currentIndex()).toString())
        self.draw_widget.update()

    def slot_update_polygons(self, polygons, current_index):
        """Grow/show/hide the per-polygon comboboxes to match *polygons*."""
        while len(self.polygon_comboboxes) < len(polygons):
            self.add_polygon_combobox()
        #self.polygon_comboboxes[self.current_polygon_index].x()
        for index, box in enumerate(self.polygon_comboboxes):
            if index < len(polygons):
                self.polygon_comboboxes[index].show()
            else:
                self.polygon_comboboxes[index].hide()
        self.update()

    def paintEvent(self, event):
        """Draw a red marker next to the combobox of the current polygon."""
        painter = QtGui.QPainter()
        painter.begin(self)
        x = self.polygon_comboboxes[self.draw_widget.get_current_polygon_index()].x()
        y = self.polygon_comboboxes[self.draw_widget.get_current_polygon_index()].y()
        color = QtGui.QColor(255,0,0)
        painter.setPen(color)
        painter.setBrush(color)
        painter.drawEllipse(QtCore.QRectF(x-8,y+8,6,6))
        painter.end()

    def get_display_mode(self):
        """Return the active view mode ('image'/'intensities'/'features'/'labels')."""
        return self.display_mode

    def slot_define_ground_plane(self, ground_plane_points):
        """Store the ground plane fitted from three user-clicked points."""
        #assumes that intensity image is loaded in processor!
        (self.current_dataset.ground_plane_normal, self.current_dataset.ground_plane_three_points) = self.processor.get_3d_plane_normal(ground_plane_points)
        self.slot_display_intensity() #switch back to image mode
class draw_widget(QtGui.QLabel):
    """Image canvas that overlays labeled polygons and handles mouse/key input.

    Emits (old-style, string-named) signals consumed by labeling_tool:
    'sigPolyChanged', 'sigPolyLabelChanged', 'sigDefineGroundPlane'.
    """
    # NOTE(review): class-level mutable list shared by all instances —
    # presumably only one draw_widget ever exists; confirm before reuse.
    ground_plane_points = []

    def __init__(self,polygons, image_filename, parent=None):
        """Set up the canvas with the given polygons and image file."""
        # NOTE(review): initializes via QtGui.QWidget.__init__ rather than
        # QtGui.QLabel.__init__ — apparently works under PyQt4; confirm.
        QtGui.QWidget.__init__(self, parent)
        self.scaleFactor = False #init is done later
        self.setBackgroundRole(QtGui.QPalette.Base)
        #self.setSizePolicy(QtGui.QSizePolicy.Ignored, QtGui.QSizePolicy.Ignored)
        self.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        self.setScaledContents(True)
        self.set_polygons(polygons)
        self.set_image(image_filename)
        self.setScaleFactor(0.8)

    def setScaleFactor(self, f):
        """Set display scale and refresh the widget geometry."""
        self.scaleFactor = f
        self.updateImageSize()

    def updateImageSize(self):
        """Resize widget and pixmap to the (mode-dependent) scale factor."""
        # Intensity/feature images are shown unscaled; camera images at 0.8.
        # Note: this overrides whatever setScaleFactor was just given.
        if self.parent().get_display_mode() == 'intensities' or self.parent().get_display_mode() == 'features':
            self.scaleFactor = 1
        else:
            self.scaleFactor = 0.8
        self.parent().resize(900, 700)
        self.setMinimumHeight(self.image.height() * self.scaleFactor)
        self.setMinimumWidth(self.image.width() * self.scaleFactor)
        self.setMaximumHeight(self.image.height() * self.scaleFactor)
        self.setMaximumWidth(self.image.width() * self.scaleFactor)
        pixmap = QtGui.QPixmap.fromImage(self.image)
        self.resize(self.scaleFactor * pixmap.size());
        self.setPixmap(pixmap);

    def set_polygons(self, polygons):
        """Replace the polygon list, reset selection, and notify listeners."""
        self.polygons = polygons
        self.current_polygon_index = 0
        self.update()
        self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)

    def set_image(self, filename):
        """Load *filename* (or a placeholder if missing) and refresh."""
        print filename
        if os.path.isfile(filename):
            self.image = QtGui.QImage(filename)
        else:
            self.image = QtGui.QImage('noimage.png')
        self.updateImageSize()
        self.update()

    def paintEvent(self, event):
        """Paint the pixmap, then overlay polygons / ground-plane points."""
        # draw image as label-pixmap
        QtGui.QLabel.paintEvent(self,event)
        painter = QtGui.QPainter()
        painter.begin(self)
        if self.parent().get_display_mode() == 'image' or self.parent().get_display_mode() == 'labels':
            # One pen color per polygon label; red marks the active polygon.
            color = QtGui.QColor(0,0,255)
            color_surface = QtGui.QColor(0,255,0)
            color_roi = QtGui.QColor(255,255,255)
            color_edge = QtGui.QColor(255,255,0)
            color_edge_up = QtGui.QColor(255,255,255)
            color_edge_down = QtGui.QColor(255,150,255)
            color_background = QtGui.QColor(255,0,255)
            color_current = QtGui.QColor(255,0,0)
            for index, polygon in enumerate(self.polygons):
                last_point = (-1,-1)
                first = True;
                if self.current_polygon_index != index or self.parent().get_display_mode() != 'image':
                    if polygon.get_label() == 'surface':
                        painter.setPen(color_surface)
                    elif polygon.get_label() == 'roi':
                        painter.setPen(color_roi)
                    elif polygon.get_label() == 'edge':
                        painter.setPen(color_edge)
                    elif polygon.get_label() == 'edge_up':
                        painter.setPen(color_edge_up)
                    elif polygon.get_label() == 'edge_down':
                        painter.setPen(color_edge_down)
                    elif polygon.get_label() == 'background':
                        painter.setPen(color_background)
                    else:
                        painter.setPen(color)
                else:
                    painter.setPen(color_current)
                # Connect consecutive points; all coordinates are stored
                # unscaled and multiplied by scaleFactor for display.
                for point in polygon.get_points():
                    if False == first:
                        painter.drawLine(QtCore.QPointF(point[0],point[1]) * self.scaleFactor, QtCore.QPointF(last_point[0],last_point[1]) * self.scaleFactor)
                    last_point = point
                    first = False
                # Close the polygon outline unless it is the one being edited.
                if (self.parent().get_display_mode() != 'image' or self.current_polygon_index != index ) and polygon.get_type() == 'polygon' and len(polygon.get_points()) :
                    painter.drawLine(QtCore.QPointF(last_point[0],last_point[1]) * self.scaleFactor, QtCore.QPointF(polygon.get_points()[0][0],polygon.get_points()[0][1]) * self.scaleFactor)
                else:
                    for point in polygon.get_points():
                        painter.drawEllipse(QtCore.QRectF(point[0] * self.scaleFactor-3,point[1] * self.scaleFactor-3,6,6))
        elif self.parent().get_display_mode() == 'intensities':
            # Ground-plane picking overlay: show the clicked points.
            color = QtGui.QColor(255,0,255)
            painter.setPen(color)
            for point in self.ground_plane_points:
                painter.drawEllipse(QtCore.QRectF(point[0] * self.scaleFactor-3,point[1] * self.scaleFactor-3,6,6))
        painter.end()

    def mousePressEvent(self,event):
        """Mode-dependent click handling.

        'image': left click adds a point to the current polygon, right
        click removes the last one.  'intensities': collect three valid
        points, then emit sigDefineGroundPlane.  'features': print/display
        the feature vector of the clicked 3D point.  First click only
        focuses the widget.
        """
        if self.hasFocus():
            if self.parent().get_display_mode() == 'image':
                if event.button() == QtCore.Qt.LeftButton:
                    #print 'coords:', x,' ',y
                    # Store unscaled image coordinates.
                    point = (event.x() / self.scaleFactor, event.y() / self.scaleFactor)
                    self.polygons[self.current_polygon_index].add_point(point)
                    self.update()
                    self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
                if event.button() == QtCore.Qt.RightButton:
                    if False == self.polygons[self.current_polygon_index].is_empty():
                        self.polygons[self.current_polygon_index].delete_last_point()
                        self.update()
                        self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
            elif self.parent().get_display_mode() == 'intensities':
                point = (event.x() / self.scaleFactor, event.y() / self.scaleFactor)
                print 'point:', point
                if True == self.parent().processor.check_3d_plane_point(point):
                    self.ground_plane_points.append(point)
                    if len(self.ground_plane_points) < 3:
                        self.update()
                    else:
                        # Three points collected: hand off to labeling_tool.
                        self.emit(QtCore.SIGNAL("sigDefineGroundPlane"), self.ground_plane_points)
                        self.ground_plane_points = []
            elif self.parent().get_display_mode() == 'features':
                point = (event.x() / self.scaleFactor, event.y() / self.scaleFactor)
                if True == self.parent().processor.check_3d_plane_point(point):
                    print 'point:', point
                    point3d = self.parent().processor.get_3d_point(point)
                    print 'point3d',point3d
                    index = self.parent().processor.get_3d_point_index_in_unrotated(point3d)
                    self.parent().processor.load_data(self.parent().current_dataset.id)
                    self.parent().processor.process_raw_data()
                    self.parent().processor.features.prepare([index])
                    self.parent().processor.feature_type = 'gaussian_histograms'
                    fv = self.parent().processor.features.get_featurevector(index,0)
                    print 'fv',fv
                    self.parent().processor.display_featurevector(fv)
                    #reload intensity data for next click
                    self.parent().processor.load_data(self.parent().current_dataset.id)
                    self.parent().processor.process_intensities()
                    #print 'fv:', self.parent().processor.get_point_featurevector(index, self.parent().processor.pts3d_int)
                #print 'WARNING: THIS IS NOT WORKING YET BECAUSE OF MISSING INTENSITY INDEX MAPPING FOR GRAZEEFFCT REMOVED PTS'
        else:
            self.setFocus()

    def mouseDoubleClickEvent(self,event):
        """Left double-click in image mode starts a new polygon."""
        if self.parent().get_display_mode() == 'image':
            if event.button() == QtCore.Qt.LeftButton:
                self.start_new_polygon()
                self.update()
                self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)

    def start_new_polygon(self):
        """Append a fresh polygon (if the current one has points) and select it."""
        if False == self.polygons[self.current_polygon_index].is_empty():
            # if self.current_polygon_index == len(self.polygons) - 1:
            self.polygons.append(label_object.label_object()) #last one, append new
            self.current_polygon_index = len(self.polygons) - 1
            print "new poly index: ", self.current_polygon_index

    def delete_empty_polygon(self):
        """Drop the current polygon if empty (never the only one).

        Returns True when a polygon was deleted, else False.
        """
        if True == self.polygons[self.current_polygon_index].is_empty():
            #and it isn't the only one:
            if 1 != len(self.polygons):
                del self.polygons[self.current_polygon_index]
                if 0 != self.current_polygon_index:
                    self.current_polygon_index -= 1
                print "new poly index: ", self.current_polygon_index
                return True
        return False

    def keyPressEvent(self, event):
        """Keyboard shortcuts.

        Left/Right: select previous/next polygon (creating or pruning empty
        ones).  o/s/r/b/e/u/d: relabel the current polygon via
        sigPolyLabelChanged.  +/-: zoom.  Anything else: default handling.
        """
        key = event.key()
        if key == QtCore.Qt.Key_Right:
            print 'right'
            if self.current_polygon_index < len(self.polygons) - 1:
                self.delete_empty_polygon()
                self.current_polygon_index += 1
                print "czurrent poly index: ", self.current_polygon_index
            else:
                self.start_new_polygon()
            self.parent().slot_update_polygon_labels()
            self.update()
            self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
        elif key == QtCore.Qt.Key_Left:
            print 'left'
            if self.current_polygon_index > 0:
                if False == self.delete_empty_polygon():
                    self.current_polygon_index -= 1
                print "current poly index: ", self.current_polygon_index
                self.update()
                self.emit(QtCore.SIGNAL("sigPolyChanged"), self.polygons, self.current_polygon_index)
        elif key == QtCore.Qt.Key_O:
            print 'o'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'object')
        elif key == QtCore.Qt.Key_S:
            print 's'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'surface')
        elif key == QtCore.Qt.Key_R:
            print 'r'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'roi')
        elif key == QtCore.Qt.Key_B:
            print 'b'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'background')
        elif key == QtCore.Qt.Key_E:
            print 'e'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'edge')
        elif key == QtCore.Qt.Key_U:
            print 'u'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'edge_up')
        elif key == QtCore.Qt.Key_D:
            print 'd'
            self.emit(QtCore.SIGNAL("sigPolyLabelChanged"), self.current_polygon_index, 'edge_down')
        elif key == QtCore.Qt.Key_Plus:
            print '+'
            self.setScaleFactor(self.scaleFactor * 1.25)
            self.update()
        elif key == QtCore.Qt.Key_Minus:
            print '-'
            self.setScaleFactor(self.scaleFactor * 0.8)
            self.update()
        else:
            QtGui.QWidget.keyPressEvent(self, event)

    def get_polygons(self):
        """Return the polygon list (shared with the current dataset)."""
        return self.polygons

    def get_current_polygon_index(self):
        """Return the index of the polygon currently being edited."""
        return self.current_polygon_index
if __name__ == "__main__":
    # Standalone entry point: open the labeling GUI on the default data directory.
    app = QtGui.QApplication(sys.argv)
    tool = labeling_tool('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
    tool.show()
    sys.exit(app.exec_())
| [
[
1,
0,
0.0302,
0.001,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0302,
0.001,
0,
0.66,
0.0588,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.0322,
0.001,
0,
0.66,... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"from opencv.highgui import cvLoadImage",
"import sys",
"import opencv.cv as cv",
"import opencv.highgui as hg",
"from PyQt4 import QtGui, QtCore",
"import shutil #fi... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
from labeling import label_object, scan_dataset, scans_database
import copy
##WARNING! THIS DOES A PARTIAL COPY OF A DATABASE! BACKUP BEFOREHAND AND KNOW WHAT YOU'RE DOING!!##
path = '/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling'
f1 = 'database_changed.pkl'
db1 = scans_database.scans_database()
db1.load(path, f1)
f2 = 'database.pkl'
db2 = scans_database.scans_database()
db2.load(path, f2)
#assume db2 is equal or larger, copy changes from db1 to db2!
d2 = db2.get_first_dataset()
d1 = db1.get_first_dataset()
while False != d1:
if False != d2 and d1.id == d2.id:
print 'copy', d1.id
d2.dict = copy.deepcopy(d1.dict)
d2 = db2.get_next_dataset()
d1 = db1.get_next_dataset()
db2.save()
| [
[
1,
0,
0.4915,
0.0169,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.4915,
0.0169,
0,
0.66,
0.0714,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.5085,
0.0169,
0,
0.... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"from labeling import label_object, scan_dataset, scans_database",
"import copy",
"path = '/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling'",
"f1 =... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
from labeling import label_object, scan_dataset, scans_database
# One-off maintenance script: ensure every dataset in database.pkl has a
# 'ransac_table_plane' attribute, then write the database back to disk.
db = scans_database.scans_database()
path = '/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling'
filename = 'database.pkl'
db.load(path, filename)
db.add_attribute_to_every_dataset('ransac_table_plane')
db.save()
| [
[
1,
0,
0.617,
0.0213,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.617,
0.0213,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.6383,
0.0213,
0,
0.66,... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"from labeling import label_object, scan_dataset, scans_database",
"scans_database = scans_database.scans_database()",
"path = '/home/martin/robot1_data/usr/martin/laser_ca... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
from labeling import label_object, scan_dataset, scans_database
# One-off script: create a new scan database pickle containing a single
# placeholder dataset, and save it to disk.
db = scans_database.scans_database()
db.path = '/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling'
db.filename = 'database.pkl'
dataset = scan_dataset.scan_dataset()
dataset.title = 'empty'  # BUGFIX: was misspelled 'emtpy'
dataset.id = '0'
dataset.surface_id = '-1'  # no associated surface
dataset.scan_filename = ''
dataset.image_filename = ''
db.add_dataset(dataset)
db.save()
| [
[
1,
0,
0.5472,
0.0189,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.5472,
0.0189,
0,
0.66,
0.0769,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.566,
0.0189,
0,
0.6... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"from labeling import label_object, scan_dataset, scans_database",
"scans_database = scans_database.scans_database()",
"scans_database.path = '/home/martin/robot1_data/usr/... |
## Copyright (c) 2004-2007, Andrew D. Straw. All rights reserved.
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above
## copyright notice, this list of conditions and the following
## disclaimer in the documentation and/or other materials provided
## with the distribution.
## * Neither the name of the Andrew D. Straw nor the names of its
## contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##Original code was modified, PlaneLeastSquaresModel() added:
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import numpy
import numpy as np
import scipy # use numpy if scipy unavailable
import scipy.linalg # use numpy if scipy unavailable
def ransac(data,model,n,k,t,d,debug=False,return_all=False):
print 'INFO: running RANSAC for k=',k,'iterations'
"""fit model parameters to data using the RANSAC algorithm
This implementation written from pseudocode found at
http://en.wikipedia.org/w/index.php?title=RANSAC&oldid=116358182
{{{
Given:
data - a set of observed data points
model - a model that can be fitted to data points
n - the minimum number of data values required to fit the model
k - the maximum number of iterations allowed in the algorithm
t - a threshold value for determining when a data point fits a model
d - the number of close data values required to assert that a model fits well to data
Return:
bestfit - model parameters which best fit the data (or nil if no good model is found)
iterations = 0
bestfit = nil
besterr = something really large
while iterations < k {
maybeinliers = n randomly selected values from data
maybemodel = model parameters fitted to maybeinliers
alsoinliers = empty set
for every point in data not in maybeinliers {
if point fits maybemodel with an error smaller than t
add point to alsoinliers
}
if the number of elements in alsoinliers is > d {
% this implies that we may have found a good model
% now test how good it is
bettermodel = model parameters fitted to all points in maybeinliers and alsoinliers
thiserr = a measure of how well model fits these points
if thiserr < besterr {
bestfit = bettermodel
besterr = thiserr
}
}
increment iterations
}
return bestfit
}}}
"""
# iterations = 0
# bestfit = None
# besterr = numpy.inf
# best_inlier_idxs = None
# while iterations < k:
# maybe_idxs, test_idxs = random_partition(n,data.shape[0])
# print n
# maybeinliers = data[maybe_idxs,:]
# #print 'z',maybeinliers
# test_points = data[test_idxs]
# maybemodel = model.fit(maybeinliers)
# test_err = model.get_error( test_points, maybemodel)
# also_idxs = test_idxs[test_err < t] # select indices of rows with accepted points
# alsoinliers = data[also_idxs,:]
# if debug:
# print 'test_err.min()',test_err.min()
# print 'test_err.max()',test_err.max()
# print 'numpy.mean(test_err)',numpy.mean(test_err)
# print 'iteration %d:len(alsoinliers) = %d'%(
# iterations,len(alsoinliers))
# if len(alsoinliers) > d:
# print numpy.asmatrix(maybeinliers), numpy.asmatrix(alsoinliers)
# betterdata = numpy.concatenate( (maybeinliers, numpy.asmatrix(alsoinliers)) )
# bettermodel = model.fit(numpy.asarray(betterdata))
# better_errs = model.get_error( betterdata, bettermodel)
# thiserr = numpy.mean( better_errs )
# if thiserr < besterr:
# bestfit = bettermodel
# besterr = thiserr
# print maybe_idxs, also_idxs
# best_inlier_idxs = numpy.concatenate( (maybe_idxs, [also_idxs]) )
# iterations+=1
# if bestfit is None:
# raise ValueError("did not meet fit acceptance criteria")
# if return_all:
# return bestfit, {'inliers':best_inlier_idxs}
# else:
# return bestfit
iterations = 0
bestfit = None
besterr = numpy.inf
best_inlier_idxs = None
while iterations < k:
#print data
maybe_idxs, test_idxs = random_partition(n,data.shape[0])
maybeinliers = data[maybe_idxs,:]
test_points = data[test_idxs]
maybemodel = model.fit(maybeinliers)
test_err = model.get_error( test_points, maybemodel)
also_idxs = test_idxs[test_err < t] # select indices of rows with accepted points
alsoinliers = data[also_idxs,:]
if debug:
print 'test_err.min()',test_err.min()
print 'test_err.max()',test_err.max()
print 'numpy.mean(test_err)',numpy.mean(test_err)
print 'iteration %d:len(alsoinliers) = %d'%(
iterations,len(alsoinliers))
if len(alsoinliers) > d:
betterdata = numpy.concatenate( (maybeinliers, alsoinliers) )
bettermodel = model.fit(betterdata)
better_errs = model.get_error( betterdata, bettermodel)
thiserr = numpy.mean( better_errs )
if thiserr < besterr:
bestfit = bettermodel
besterr = thiserr
best_inlier_idxs = numpy.concatenate( (maybe_idxs, also_idxs) )
iterations+=1
if bestfit is None:
raise ValueError("did not meet fit acceptance criteria")
if return_all:
return bestfit, {'inliers':best_inlier_idxs}
else:
return bestfit
def random_partition(n, n_data):
    """Return n random row indices of the data (and also the remaining n_data - n indices)."""
    shuffled = numpy.arange(n_data)
    numpy.random.shuffle(shuffled)
    return shuffled[:n], shuffled[n:]
class LinearLeastSquaresModel:
    """Linear system solved using linear least squares.

    This class serves as an example that fulfills the model interface
    needed by the ransac() function: fit(data) and get_error(data, model).
    """
    def __init__(self, input_columns, output_columns, debug=False):
        # column indices of the data matrix forming the inputs A and outputs B
        self.input_columns = input_columns
        self.output_columns = output_columns
        self.debug = debug

    def fit(self, data):
        """Least-squares solve A x = B over the configured columns; returns x."""
        A = numpy.vstack([data[:, i] for i in self.input_columns]).T
        B = numpy.vstack([data[:, i] for i in self.output_columns]).T
        x, resids, rank, s = scipy.linalg.lstsq(A, B)
        return x

    def get_error(self, data, model):
        """Return the sum of squared residuals per row for the given model."""
        A = numpy.vstack([data[:, i] for i in self.input_columns]).T
        B = numpy.vstack([data[:, i] for i in self.output_columns]).T
        # BUGFIX: scipy.dot was a deprecated alias of numpy.dot and has been
        # removed from modern SciPy; use numpy.dot directly.
        B_fit = numpy.dot(A, model)
        err_per_point = numpy.sum((B - B_fit) ** 2, axis=1)  # sum squared error per row
        # NOTE: removed a stray debug 'print err_per_point' that spammed stdout
        # on every error evaluation.
        return err_per_point
class PlaneLeastSquaresModel:
    """RANSAC model for a 3D plane, represented as [point, unit normal].

    Fulfills the model interface needed by ransac(): fit(data) and
    get_error(data, model).  Planes tilted more than 30 degrees from
    horizontal are rejected with an effectively infinite error.
    """
    def __init__(self, debug=False):
        self.debug = debug

    def fit(self, data):
        """Fit a plane through the first three rows of data; returns [point, unit normal]."""
        model = [data[0], numpy.cross(data[1] - data[0], data[2] - data[1])]  # point, normal
        model[1] = model[1] / numpy.linalg.norm(model[1])  # normalize the normal
        return model

    def get_error(self, data, model):
        """Return each point's absolute distance to the plane (Hesse normal form)."""
        # reject the model if it's not roughly horizontal
        max_angle = 30.0 * np.pi / 180.0
        # BUGFIX: scipy.dot was a deprecated alias of numpy.dot, removed from
        # modern SciPy; use np.dot throughout.
        angle = np.arccos(np.dot(np.array([0, 0, 1]), model[1].T))  # normal is normalized
        if abs(angle) > max_angle:
            # effectively infinite error so RANSAC never accepts this plane
            return np.ones(np.shape(data)[0]) * 999999999999999999999999999999
        # http://de.wikipedia.org/wiki/Hessesche_Normalform
        d = np.dot(model[0], model[1].T)
        s = np.dot(data, model[1].T) - d
        return abs(s)
def test():
    """Demo of ransac() with a 1D linear model: synthesize noisy data with
    outliers, compare a plain least-squares fit against the RANSAC fit, and
    plot both (requires pylab)."""
    # generate perfect input data
    n_samples = 500
    n_inputs = 1
    n_outputs = 1
    A_exact = 20*numpy.random.random((n_samples,n_inputs) )
    perfect_fit = 60*numpy.random.normal(size=(n_inputs,n_outputs) ) # the model
    B_exact = scipy.dot(A_exact,perfect_fit)
    assert B_exact.shape == (n_samples,n_outputs)
    # add a little gaussian noise (linear least squares alone should handle this well)
    A_noisy = A_exact + numpy.random.normal(size=A_exact.shape )
    B_noisy = B_exact + numpy.random.normal(size=B_exact.shape )
    if 1:
        # add some outliers: overwrite 100 random rows with unrelated values
        n_outliers = 100
        all_idxs = numpy.arange( A_noisy.shape[0] )
        numpy.random.shuffle(all_idxs)
        outlier_idxs = all_idxs[:n_outliers]
        non_outlier_idxs = all_idxs[n_outliers:]
        A_noisy[outlier_idxs] = 20*numpy.random.random((n_outliers,n_inputs) )
        B_noisy[outlier_idxs] = 50*numpy.random.normal(size=(n_outliers,n_outputs) )
    # setup model
    all_data = numpy.hstack( (A_noisy,B_noisy) )
    input_columns = range(n_inputs) # the first columns of the array
    output_columns = [n_inputs+i for i in range(n_outputs)] # the last columns of the array
    debug = False
    model = LinearLeastSquaresModel(input_columns,output_columns,debug=debug)
    # reference: plain least squares on all data (outliers included)
    linear_fit,resids,rank,s = scipy.linalg.lstsq(all_data[:,input_columns],
                                                  all_data[:,output_columns])
    # run RANSAC algorithm
    ransac_fit, ransac_data = ransac(all_data,model,
                                     50, 1000, 7e3, 300, # misc. parameters
                                     debug=debug,return_all=True)
    if 1:
        import pylab
        sort_idxs = numpy.argsort(A_exact[:,0])
        A_col0_sorted = A_exact[sort_idxs] # maintain as rank-2 array
        if 1:
            # plot all noisy data plus the points RANSAC selected as inliers
            pylab.plot( A_noisy[:,0], B_noisy[:,0], 'k.', label='data' )
            pylab.plot( A_noisy[ransac_data['inliers'],0], B_noisy[ransac_data['inliers'],0], 'bx', label='RANSAC data' )
        else:
            # alternative view: distinguish the injected outliers explicitly
            pylab.plot( A_noisy[non_outlier_idxs,0], B_noisy[non_outlier_idxs,0], 'k.', label='noisy data' )
            pylab.plot( A_noisy[outlier_idxs,0], B_noisy[outlier_idxs,0], 'r.', label='outlier data' )
        pylab.plot( A_col0_sorted[:,0],
                    numpy.dot(A_col0_sorted,ransac_fit)[:,0],
                    label='RANSAC fit' )
        pylab.plot( A_col0_sorted[:,0],
                    numpy.dot(A_col0_sorted,perfect_fit)[:,0],
                    label='exact system' )
        pylab.plot( A_col0_sorted[:,0],
                    numpy.dot(A_col0_sorted,linear_fit)[:,0],
                    label='linear fit' )
        pylab.legend()
        pylab.show()
def testPlane():
debug = True
model = PlaneLeastSquaresModel(debug)
data = numpy.array([[0,0,0],[0,1,0],[0.1,12,0.1],[0,0,12],[1,0,0],[1,2,13]])
# run RANSAC algorithm
ransac_fit, ransac_data = ransac(data,model,
3, 1000, 1, 2, # misc. parameters
debug=debug,return_all=True)
print ransac_fit
print ransac_data
def testPlanePointcloud():
    """Fit a horizontal plane to a recorded laser point cloud with RANSAC,
    keep only the inlier points, and show them in the 3D viewer.

    Requires the laser_camera_segmentation ROS package and the recorded
    dataset on disk — presumably a table-top scan; verify against the data.
    """
    import roslib; roslib.load_manifest('laser_camera_segmentation')
    import laser_camera_segmentation.processor as processor
    import laser_camera_segmentation.configuration as configuration
    cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
    #sc = scanner.scanner(cfg)
    pc = processor.processor(cfg)
    #pc.load_data('2009Oct30_162400')
    pc.load_data('2009Nov04_141226')
    pc.process_raw_data()
    debug = False
    model = PlaneLeastSquaresModel(debug)
    # pts3d_bound is 3xN; ransac() expects samples as rows, hence the transpose
    data = numpy.asarray(pc.pts3d_bound).T
    # run RANSAC algorithm
    ransac_fit, ransac_data = ransac(data,model,
                                     3, 1000, 0.02, 300, # misc. parameters
                                     debug=debug,return_all=True)
    print ransac_fit
    print ransac_data
    print 'len inlier',len(ransac_data['inliers']),'shape pts',np.shape(pc.pts3d_bound)
    # keep only the plane inliers and display them colored by height
    pc.pts3d_bound = pc.pts3d_bound[:,ransac_data['inliers']]
    pc.display_3d('height')
if __name__=='__main__':
    # run the point-cloud demo by default; testPlane() is the small synthetic check
    #testPlane()
    testPlanePointcloud()
| [
[
1,
0,
0.1111,
0.0031,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1142,
0.0031,
0,
0.66,
0.0909,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1173,
0.0031,
0,
... | [
"import numpy",
"import numpy as np",
"import scipy # use numpy if scipy unavailable",
"import scipy.linalg # use numpy if scipy unavailable",
"def ransac(data,model,n,k,t,d,debug=False,return_all=False):\n print('INFO: running RANSAC for k=',k,'iterations')\n \"\"\"fit model parameters to data using ... |
#!/usr/bin/python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle (Healthcare Robotics Lab, Georgia Tech.)
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import time
import hrl_lib.util as ut
import opencv as cv
import opencv.highgui as hg
import pycessing as pyc
import transforms as tr
import numpy as np,math
import tilting_hokuyo.processing_3d as p3d
#import math_util as mu
def scale(image, s):
    """Return a copy of *image* resized by factor *s*, using area interpolation."""
    new_size = cv.cvSize(int(image.width * s), int(image.height * s))
    resized = cv.cvCreateImage(new_size, image.depth, image.nChannels)
    cv.cvResize(image, resized, cv.CV_INTER_AREA)
    return resized
class Callib():
    """Interactive laser-to-camera calibration GUI.

    Projects a 3D laser point cloud into a camera image using the current
    calibration parameter vector, draws the overlay with pycessing, and lets
    the user nudge individual parameters with the keyboard until the overlay
    lines up.  Press Enter to preview the result in the 3D viewer.
    """
    def __init__(self, transFunc, seeds, deltas, names, pts, img, cam_proj_mat, cam_centers, zoom, id):
        '''
        transFunc => takes in param vector, returns homogeneous Xform
        seeds => initial param vector (1XM array)
        deltas => step sizes in param vector quantities (1XM array)
        pts => 3D points to be mapped into img (3XN array)
        img => an OpenCV image from the camera
        '''
        # dataset id used by display3d() to reload the scan for 3D preview
        self.dataset_id = id
        # Note that adaptors are deprecated...
        imgTmp = cv.cvCloneImage(img)
        imNP = cv.adaptors.Ipl2NumPy(imgTmp)
        self.height = imNP.shape[0] * zoom
        self.width = imNP.shape[1] * zoom
        self.zoom = zoom
        # extra 200px column on the right holds the parameter readout
        pyc.size(self.width + 200, self.height)
        #pyc.size(self.height + 200, self.width + 200)
        self.pts = pts
        self.img = img
        self.vals = seeds
        # one-hot vector marking which parameter the arrow keys currently edit
        self.selected = np.zeros( self.vals.shape[0] )
        self.selected[0] = 1.0
        self.dels = deltas
        self.names = names
        self.transFunc = transFunc
        self.cam_proj_mat = cam_proj_mat
        self.cam_centers = cam_centers
        # toggled with the space bar to show/hide the laser overlay
        self.display_laser = True
    def reDraw(self):
        """Redraw the image with the laser overlay and the parameter panel,
        then process pending input events."""
        pyc.background(255)
        pyc.lightSpecular(255*30/640, 255*30/640, 255*30/640)
        pyc.directionalLight(255,255,255,0,0,1)
        pyc.specular(102, 102, 102)
        # Project self.pts into image, and display it
        imgTmp = cv.cvCloneImage(self.img)
        imNP = cv.adaptors.Ipl2NumPy(scale(imgTmp, self.zoom))
        color_list = [(255,255,0),(255,0,0),(0,255,255),(0,255,0),(0,0,255),(0,100,100),(100,100,0),
                      (100,0,100),(100,200,100),(200,100,100),(100,100,200),(100,0,200),(0,200,100),
                      (0,100,200),(200,0,100),(100,0,100),(255,152,7) ]
        # transform laser points with the current parameter vector, then apply
        # the pinhole projection and shift/scale into pixel coordinates
        XformPts = tr.transform( self.transFunc(self.vals), self.pts )
        camPts = self.cam_proj_mat * tr.xyzToHomogenous(XformPts)
        camPts = camPts / camPts[2]
        camPts[0] = (camPts[0] + self.cam_centers[0]) * self.zoom
        camPts[1] = (camPts[1] + self.cam_centers[1]) * self.zoom
        camPts = np.matrix( np.round(camPts), 'int')
        # keep only projected points that land inside the image
        conditions = np.concatenate([camPts[0] >= 0,
                                     camPts[0] < imNP.shape[1],
                                     camPts[1] >= 0,
                                     camPts[1] < imNP.shape[0]], 0)
        r, c = np.where(np.all(conditions, 0))
        camPts_bound = camPts[:, c.A[0]]
        # color the overlay by the (normalized) first laser coordinate
        x = np.asarray(self.pts[0])[0][c.A[0]]
        x = x - x.min()
        x = x / x.max() * 256 #512 #number of colors
        x = np.floor(x)
        x = np.asarray(np.matrix(x,'int'))[0]
        if self.display_laser:
            map2d = np.asarray(camPts_bound[0:2])
            n,m = map2d.shape
            for i in range(0,m):
                imNP[map2d[1,i],map2d[0,i], :] = [x[i],256-x[i],128+x[i]/2]#color_list[x[i]]
        imgTmp = cv.adaptors.NumPy2Ipl(imNP)
        #imgSmall = cv.cvCreateImage(cv.cvSize(imgTmp.width/3, imgTmp.height/3), cv.IPL_DEPTH_8U, 3)
        #cv.cvResize(imgTmp, imgSmall, cv.CV_INTER_AREA)
        im = cv.adaptors.Ipl2PIL(imgTmp)
        #pyc.rotate(math.radians(90))
        pyc.image(im, 0,0, self.width, self.height)
        #pyc.rotate(math.radians(-90))
        # Display the current values of the parameter vector (and highlight the selected one)
        pyc.textSize(10)
        for i, val in enumerate(self.vals):
            if np.nonzero(self.selected)[0] == i:
                print 'x',
            print '%8.4f ' % self.vals[i],
            pval = '%7s: %8.4f' % (self.names[i], self.vals[i])
            pyc.text(pval, self.width + 15, 20 + 20*i, 0)
            if np.nonzero(self.selected)[0] == i:
                # draw a small red arrow next to the selected parameter
                pyc.fill(255,0,0)
                pyc.quad(self.width+4.0, 15.0 + 20.0*i - 7.0,
                         self.width+4.0, 15.0 + 20.0*i + 7.0,
                         self.width+13.0, 15.0 + 20.0*i,
                         self.width+13.0, 15.0 + 20.0*i)
        print '\n'
        self.move(pyc.escape_handler(pyc.draw()))
    def move(self, events):
        """Handle keyboard events: up/down select a parameter, left/right nudge
        it by its step size, space toggles the overlay, Enter opens the 3D view."""
        if len(events) > 0:
            for event in events:
                currselect = np.nonzero( self.selected )[0]
                if event.type == pyc.KEYDOWN:
                    if event.key == pyc.K_DOWN:
                        self.selected[currselect] = 0
                        self.selected[ ut.bound(currselect + 1, 0, self.selected.shape[0]-1) ] = 1
                    if event.key == pyc.K_UP:
                        self.selected[currselect] = 0
                        self.selected[ ut.bound(currselect - 1, 0, self.selected.shape[0]-1) ] = 1
                    if event.key == pyc.K_LEFT:
                        self.vals[currselect] -= self.dels[currselect]
                    if event.key == pyc.K_RIGHT:
                        self.vals[currselect] += self.dels[currselect]
                    if event.key == pyc.K_SPACE:
                        self.display_laser = not self.display_laser
                    if event.key == pyc.K_RETURN:
                        self.display3d()
        return events
    def display3d(self):
        """Reload the current dataset and show it in 3D with the current
        calibration parameters applied."""
        import laser_camera_segmentation.processor as processor
        import laser_camera_segmentation.configuration as configuration
        print 'display in 3d...'
        cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
        # inject the interactively-tuned parameter vector into the configuration
        cfg.cam_vec = np.array(self.vals)
        import scanr_transforms as trs
        cfg.camTlaser = trs.camTlaser(cfg.cam_vec)
        pc = processor.processor(cfg)
        pc.load_data(self.dataset_id)
        pc.process_raw_data()
        pc.display_3d('labels', False)
if __name__ == '__main__':
    # Interactive calibration entry point: load an image + laser point cloud,
    # then tune the laser-to-camera transform parameters by hand.
    import optparse
    p = optparse.OptionParser()
    p.add_option('-c', action='store', type='string', dest='image',
                 default='xxcalib.png',
                 help='camera image')
    p.add_option('-p', action='store', type='string', dest='point_cloud',
                 default='xxcalib.pkl',
                 help='pickle file containing a point cloud matrix expressed in the laser\'s frame')
    p.add_option('-n', action='store', type='string', dest='camera_name',
                 default='eleUTM',
                 help='name of camera as specified in camera_config.py')
    # BUGFIX: Callib.__init__ takes 10 arguments (ending in zoom, id) but was
    # called with only 9, raising a TypeError at startup.  The dataset id is
    # needed by display3d() (Enter key); expose it as an option.
    p.add_option('-d', action='store', type='string', dest='dataset_id',
                 default=None,
                 help='dataset id to reload for the 3d preview (Enter key)')
    opt, args = p.parse_args()
    image_filename = opt.image
    point_cloud_filename = opt.point_cloud

    def cameraTlaser(vec):
        # 3 translations followed by 4 chained rotations (Rx, Rz, Rx, Rz);
        # returns the camera-from-laser homogeneous transform
        x, y, z, r1, r2, r3, r4 = vec
        disp = np.matrix([x, y, z]).T
        rot1 = tr.Rx(math.radians(r1))
        rot2 = tr.Rz(math.radians(r2))
        rot3 = tr.Rx(math.radians(r3))
        rot4 = tr.Rz(math.radians(r4))
        rt = rot4 * rot3 * rot2 * rot1
        laserTcam = tr.composeHomogeneousTransform(rt, disp)
        trans = tr.invertHomogeneousTransform(laserTcam)
        return trans

    # initial parameter vector and per-keypress step sizes
    seeds = np.array([ -0.087, 0.105 , 0.01, 89.8, 89.8, 90.0, 0])
    deltas = np.array([0.001, 0.001, 0.001, 0.1, 0.1, 0.1, 0.1])
    names = ['x_disp', 'y_disp', 'z_disp', 'rotX', 'rotZ', 'rotX', 'rotZ']
    img = hg.cvLoadImage(image_filename)
    raw_laser_scans = ut.load_pickle(point_cloud_filename)
    poses, scans = raw_laser_scans['laserscans'][0]
    points_cloud_laser = p3d.generate_pointcloud(poses, scans, math.radians(-180), math.radians(180),
                                                 0, .035, max_dist=5, min_dist=.2)
    import webcam_config as cc
    cp = cc.webcam_parameters['DesktopWebcam']
    fx = cp['focal_length_x_in_pixels']
    fy = cp['focal_length_y_in_pixels']
    cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                              [0, fy, 0, 0],
                              [0, 0, 1, 0]])
    cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
    c = Callib(cameraTlaser, seeds, deltas, names, points_cloud_laser, img,
               cam_proj_mat, cam_centers, 1/1.0, opt.dataset_id)
    while True:
        c.reDraw()
| [
[
1,
0,
0.1325,
0.004,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.1365,
0.004,
0,
0.66,
0.1,
775,
0,
1,
0,
0,
775,
0,
0
],
[
1,
0,
0.1406,
0.004,
0,
0.66,
... | [
"import time",
"import hrl_lib.util as ut",
"import opencv as cv",
"import opencv.highgui as hg",
"import pycessing as pyc",
"import transforms as tr",
"import numpy as np,math",
"import tilting_hokuyo.processing_3d as p3d",
"def scale(image, s):\n scaled = cv.cvCreateImage(cv.cvSize(int(image.wi... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from classifier import classifier
import hrl_lib.util as ut
import numpy as np
from hrl_lib.util import getTime
import processor
class baseline_classifier(classifier):
'''
classdocs
'''
#def __init__(selfparams):
# '''
# Constructor
# '''
def test(self, feature_data = None):
#test on current scan:
print getTime(), 'test on:', self.processor.scan_dataset.id
if feature_data == None:
filename = self.processor.get_features_filename()
dict = ut.load_pickle(filename)
else:
dict = feature_data
baseline_labels = self.classify_baseline_code()
return baseline_labels, self.test_results(dict, baseline_labels)
def classify_baseline_code(self):
import hrl_tilting_hokuyo.processing_3d as p3d
import hrl_tilting_hokuyo.occupancy_grid_3d as og3d
import hrl_tilting_hokuyo.display_3d_mayavi as d3m
pt = np.matrix(self.processor.point_of_interest).T
#define VOI
width_half = self.processor.voi_width / 2.0
brf = pt+np.matrix([-width_half,-width_half,-width_half]).T
tlb = pt+np.matrix([width_half, width_half, width_half]).T
resolution = np.matrix([0.1,0.1,0.0025]).T
max_dist = 15
min_dist = -15
gr = og3d.occupancy_grid_3d(brf,tlb,resolution)
print 'filling grid...'
gr.fill_grid(self.processor.pts3d_bound)
print '...filled.'
gr.to_binary(1)
l = gr.find_plane_indices(assume_plane=True,hmin=0.3,hmax=2)
z_min = min(l)*gr.resolution[2,0]+gr.brf[2,0]
z_max = max(l)*gr.resolution[2,0]+gr.brf[2,0]
pts = np.asarray(self.processor.pts3d_bound)
conditions_surface = np.multiply(pts[2,:] > z_min, pts[2,:] < z_max)
print 'cf',conditions_surface
conditions_clutter = np.invert(conditions_surface)
conditions_surface = np.multiply(conditions_surface, np.array(self.processor.map_polys) > 0)
print 'cf',conditions_surface
idx_surface = np.where(conditions_surface)
conditions_clutter = np.multiply(conditions_clutter, np.array(self.processor.map_polys) > 0)
idx_clutter = np.where(conditions_clutter)
n, m = np.shape(self.processor.pts3d_bound)
print n,m
labels = np.zeros(m)
print np.shape(labels), labels
print np.shape(idx_surface), idx_surface
labels[idx_surface] = processor.LABEL_SURFACE
labels[idx_clutter] = processor.LABEL_CLUTTER
print labels
return labels
| [
[
1,
0,
0.2752,
0.0092,
0,
0.66,
0,
71,
0,
1,
0,
0,
71,
0,
0
],
[
1,
0,
0.2936,
0.0092,
0,
0.66,
0.2,
775,
0,
1,
0,
0,
775,
0,
0
],
[
1,
0,
0.3028,
0.0092,
0,
0.66,... | [
"from classifier import classifier",
"import hrl_lib.util as ut",
"import numpy as np",
"from hrl_lib.util import getTime",
"import processor",
"class baseline_classifier(classifier):\n '''\n classdocs\n '''\n\n\n #def __init__(selfparams):\n # '''",
" '''\n classdocs\n '''",
... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
class features(object):
    '''Abstract base for feature extractors that operate on a processor.

    Subclasses are expected to override get_indexvector,
    get_featurevector and prepare; the base implementations are
    no-ops that return None.
    '''

    # processor supplying the scan data for feature computation
    processor = None

    def __init__(self, processor):
        '''Remember the processor this extractor works on.'''
        self.processor = processor

    def get_indexvector(self):
        '''Index vector of the features (no-op in the base class).'''
        return None

    #get the feature vector for a specific point
    def get_featurevector(self):
        '''Feature vector for one point (no-op in the base class).'''
        return None

    def prepare(self, features_k_nearest_neighbors):
        '''Precomputation hook (no-op in the base class).'''
        return None
[
3,
0,
0.7593,
0.463,
0,
0.66,
0,
479,
0,
4,
0,
0,
186,
0,
0
],
[
8,
1,
0.5741,
0.0556,
1,
0.96,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
1,
0.6481,
0.0185,
1,
0.96,
... | [
"class features(object):\n '''\n classdocs\n '''\n\n\n processor = None",
" '''\n classdocs\n '''",
" processor = None",
" def __init__(self,processor):\n '''\n Constructor\n '''\n self.processor = processor",
" '''\n Constructor\n ... |
#! /usr/bin/env python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import scanner
import Processor
import configuration
import hrl_lib.util as ut
# Import Psyco if available
#try:
# import psyco
# psyco.full()
# print "Psyco loaded"
#except ImportError:
# pass
#from labeling import label_object, scan_dataset, scans_database
#database = scans_database.scans_database()
#database.load('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling','database.pkl')
#dataset = scan_dataset.scan_dataset()
#print dataset
# Load a previously recorded scan, process it, and display the result.
cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
sc = scanner.scanner(cfg)
pc = Processor.Processor(cfg)
name = ut.formatted_time()
# overwritten: work on a fixed, previously recorded scan instead of
# capturing a new one (capture call below is commented out)
name='2009Oct17_122644'
#sc.capture_and_save(name)
pc.load_data(name)
#pc.load_raw_data('pill_table/2009Sep12_142422')
pc.process_raw_data()
pc.save_mapped_image(name)
pc.display_all_data()
print 'done'
| [
[
1,
0,
0.4559,
0.0147,
0,
0.66,
0,
802,
0,
1,
0,
0,
802,
0,
0
],
[
1,
0,
0.4706,
0.0147,
0,
0.66,
0.0769,
732,
0,
1,
0,
0,
732,
0,
0
],
[
1,
0,
0.4853,
0.0147,
0,
... | [
"import scanner",
"import Processor",
"import configuration",
"import hrl_lib.util as ut",
"cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')",
"sc = scanner.scanner(cfg)",
"pc = Processor.Processor(cfg)",
"name = ut.formatted_time()",
"name=... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import numpy as np, math
import hrl_lib.transforms as tr
def residualXform( residuals ):
    '''
    residuals are np.array([ Rz2, Rx, Rz1, dx, dy, dz ])
    returns rotResid, dispResid
    '''
    # unpack into named components for readability
    rz2, rx, rz1 = residuals[0], residuals[1], residuals[2]
    dx, dy, dz = residuals[3], residuals[4], residuals[5]
    rotResid = tr.Rz(rz2) * tr.Rx(rx) * tr.Rz(rz1)
    dispResid = np.matrix([dx, dy, dz]).T
    return rotResid, dispResid
def camTlaser( res = None ):
    '''Homogeneous transform taking laser-frame points into the camera
    frame (per this file's xTy = y-to-x naming convention).

    res -- optional length-7 calibration residual
           [Ry, Rz, Rx, Rz2 (deg), dx, dy, dz (m)]; defaults to zeros.
    # @ Duke, res = np.array([0.8, 0.9, -1.7, 3.1, 0.061, 0.032, -0.035 ])
    '''
    # avoid the mutable np.zeros(...) default-argument pitfall
    if res is None:
        res = np.zeros(7)
    rot = tr.Ry( math.radians( 0.0 + res[0] )) * tr.Rz( math.radians( 0.0 + res[1] )) * tr.Rx( math.radians( -90.0 + res[2] )) * tr.Rz( math.radians( -90.0 + res[3]))
    disp = np.matrix([ res[4], res[5], res[6] ]).T + np.matrix([ 0.0, 0.0, 0.0 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def rollTtool_pointer( residuals = None ):
    '''Tool-pointer frame -> roll frame transform.

    residuals -- optional 6-vector calibration residual (see
                 residualXform); defaults to zeros.
    '''
    # avoid the mutable np.zeros(...) default-argument pitfall
    if residuals is None:
        residuals = np.zeros(6)
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Rz( math.radians( -10.0 ))
    disp = dispResid + np.matrix([ 0.008, 0.0, 0.0 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def rollTtool_MA( residuals = None ):
    '''Multi-antenna (MA) tool frame -> roll frame transform.

    residuals -- optional 6-vector calibration residual (see
                 residualXform); defaults to zeros.
    '''
    # avoid the mutable np.zeros(...) default-argument pitfall
    if residuals is None:
        residuals = np.zeros(6)
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Ry( math.radians( -90.0 ))
    disp = dispResid + np.matrix([ 0.0476, 0.0, 0.0 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def panTroll(rollAng, residuals = None ):
    '''Roll frame -> pan frame transform for the given roll angle (rad).

    residuals -- optional 6-vector calibration residual (see
                 residualXform); defaults to zeros.
    '''
    # avoid the mutable np.zeros(...) default-argument pitfall
    if residuals is None:
        residuals = np.zeros(6)
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Rx( -1.0 * rollAng )
    disp = dispResid + np.matrix([0.02021, 0.0, 0.04236 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def tiltTpan(panAng, residuals = None ):
    '''Pan frame -> tilt frame transform for the given pan angle (rad).

    residuals -- optional 6-vector calibration residual (see
                 residualXform); defaults to zeros.
    '''
    # avoid the mutable np.zeros(...) default-argument pitfall
    if residuals is None:
        residuals = np.zeros(6)
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Rz( -1.0 * panAng )
    disp = dispResid + np.matrix([ 0.07124, 0.0, 0.02243 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def laserTtilt(tiltAng, residuals = None ):
    '''Tilt frame -> laser frame transform for the given tilt angle (rad).

    residuals -- optional 6-vector calibration residual (see
                 residualXform); defaults to zeros.
    '''
    # avoid the mutable np.zeros(...) default-argument pitfall
    if residuals is None:
        residuals = np.zeros(6)
    rotResid, dispResid = residualXform( residuals )
    rot = rotResid * tr.Ry( +1.0 * tiltAng )
    disp = dispResid + np.matrix([ 0.03354, 0.0, 0.23669 ]).T
    return tr.composeHomogeneousTransform(rot, disp)
def laserTtool_pointer(rollAng, panAng, tiltAng, residuals = None):
    '''
    This is specifically for the off-axis laser pointer! Tool coordinate frame will change for each tool.
    Here, residuals are 4x6 array where:
    res[0] = rollTtool
    res[1] = panTroll
    res[2] = tiltTpan
    res[3] = laserTtilt
    Defaults to a 4x6 array of zeros (avoids a mutable default argument).
    '''
    if residuals is None:
        residuals = np.zeros([4,6])
    res = residuals
    return laserTtilt(tiltAng, res[3] ) * tiltTpan(panAng, res[2] ) * panTroll(rollAng, res[1] ) * rollTtool_pointer(res[0])
def tool_pointerTlaser(rollAng, panAng, tiltAng, residuals = None):
    '''Inverse of laserTtool_pointer (laser frame -> tool-pointer frame).

    residuals -- optional 4x6 calibration residual array; zeros by default
    (None sentinel avoids a mutable default argument).
    '''
    if residuals is None:
        residuals = np.zeros([4,6])
    return tr.invertHomogeneousTransform( laserTtool_pointer(rollAng, panAng, tiltAng, residuals) )
def laserTtool_MA(rollAng, panAng, tiltAng, residuals = None):
    '''
    This is specifically for the multi-antenna (MA) tool attachment! Tool coordinate frame will change for each tool.
    Here, residuals are 4x6 array where:
    res[0] = rollTtool
    res[1] = panTroll
    res[2] = tiltTpan
    res[3] = laserTtilt
    Defaults to a 4x6 array of zeros (avoids a mutable default argument).
    '''
    if residuals is None:
        residuals = np.zeros([4,6])
    res = residuals
    return laserTtilt(tiltAng, res[3] ) * tiltTpan(panAng, res[2] ) * panTroll(rollAng, res[1] ) * rollTtool_MA(res[0])
def tool_MATlaser(rollAng, panAng, tiltAng, residuals = None):
    '''Inverse of laserTtool_MA (laser frame -> MA tool frame).

    residuals -- optional 4x6 calibration residual array; zeros by default
    (None sentinel avoids a mutable default argument).
    '''
    if residuals is None:
        residuals = np.zeros([4,6])
    return tr.invertHomogeneousTransform( laserTtool_MA(rollAng, panAng, tiltAng, residuals) )
| [
[
1,
0,
0.2569,
0.0092,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.2661,
0.0092,
0,
0.66,
0.0833,
82,
0,
1,
0,
0,
82,
0,
0
],
[
2,
0,
0.3349,
0.0734,
0,
0.... | [
"import numpy as np, math",
"import hrl_lib.transforms as tr",
"def residualXform( residuals ):\n '''\n residuals are np.array([ Rz2, Rx, Rz1, dx, dy, dz ])\n returns rotResid, dispResid\n '''\n rotResid = tr.Rz( residuals[0] ) * tr.Rx( residuals[1] ) * tr.Rz( residuals[2] )\n dispResid = np.m... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
''' Coordinate frames for testing ROS-interface (copied from Cody).
'''
import numpy as np, math
import copy
import hrl_lib.transforms as tr
# Dictionary of homogeneous transforms from each named coordinate frame to
# the global coord frame.  'global' is NOT the world or fixed frame, it's
# just a convenient common frame.  Entries are filled in by the
# create*Transform() functions below; 'utmcam0' stays None because its
# transform depends on the servo angle (see utmcam0Tglobal_mat).
_globalT = {
    'torso' : None,
    'thok0' : None,
    'utm0' : None,
    'utmcam0': None,
    'mecanum': None
}
def create_globalTDict():
    """ call the create functions for all the coord frames
    """
    # NOTE: createUtm0Transform reads _globalT['thok0'], so thok0 must
    # be created before utm0 -- keep this order.
    for make_transform in (createTorsoTransform,
                           createThok0Transform,
                           createUtm0Transform,
                           createMecanumTransform):
        make_transform()
def createTorsoTransform():
    ''' torso frame -> global frame
    '''
    # identity rotation, zero offset: torso coincides with global
    offset = np.matrix([0., 0., 0.]).T
    orient = np.matrix(np.eye(3))
    _globalT['torso'] = tr.composeHomogeneousTransform(orient, offset)
def createThok0Transform():
    ''' thok0 frame -> global frame
    '''
    # pure 9 cm vertical offset, no rotation
    offset = np.matrix([0., 0., 0.09]).T
    orient = np.matrix(np.eye(3))
    _globalT['thok0'] = tr.composeHomogeneousTransform(orient, offset)
def createUtm0Transform():
    ''' utm0 frame -> global frame
    '''
    # start from the thok0 offset and raise it by a further 5.5 cm
    offset = copy.copy(tr.getDispSubMat(_globalT['thok0']))
    offset[2,0] += 0.055
    orient = np.matrix(np.eye(3))
    _globalT['utm0'] = tr.composeHomogeneousTransform(orient, offset)
def createMecanumTransform():
    ''' mecanum frame -> global frame (ignores the zenither)
    '''
    # 25 cm behind the global origin along x, no rotation
    offset = np.matrix([-0.25, 0., 0.0]).T
    orient = np.matrix(np.eye(3))
    _globalT['mecanum'] = tr.composeHomogeneousTransform(orient, offset)
# populate _globalT at import time so the frame-conversion helpers
# below can be used immediately
create_globalTDict()
def globalTmecanum(p,floating_vector=False):
    ''' 3x1 vector from mecanum to global.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = _globalT['mecanum'] * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def mecanumTglobal(p,floating_vector=False):
    ''' 3x1 vector from global to mecanum.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = tr.invertHomogeneousTransform(_globalT['mecanum']) * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def globalTtorso(p,floating_vector=False):
    ''' 3x1 vector from torso to global.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = _globalT['torso'] * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def torsoTglobal(p,floating_vector=False):
    ''' 3x1 vector from global to torso.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = tr.invertHomogeneousTransform(_globalT['torso']) * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def globalTthok0(p,floating_vector=False):
    ''' 3x1 vector from thok0 to global.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = _globalT['thok0'] * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def thok0Tglobal(p,floating_vector=False):
    ''' 3x1 vector from global to thok0.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = tr.invertHomogeneousTransform(_globalT['thok0']) * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def globalTutm0(p,floating_vector=False):
    ''' 3x1 vector from utm0 to global.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = _globalT['utm0'] * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
def utm0Tglobal(p,floating_vector=False):
    ''' 3x1 vector from global to utm0.

    p -- 3xN points; floating_vector=True treats p as direction(s)
         (rotation only, no homogeneous normalization).
    '''
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_gl = tr.invertHomogeneousTransform(_globalT['utm0']) * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        return p_gl[0:3]/p_gl[3]
    else:
        return p_gl[0:3]
## transformation matrix to go from global to utmcam0 coord frame.
# @param ang - servo angle (in RADIANS)
# @return 4x4 transformation matrix.
def utmcam0Tglobal_mat(ang):
    '''4x4 homogeneous transform from the global frame to the utmcam0
    (camera) frame for the given servo tilt angle.

    ang -- servo angle in RADIANS.
    '''
    # global -> thok0
    thok0Tglobal_mat = tr.invertHomogeneousTransform(_globalT['thok0'])
    # servo angle.
    disp = np.matrix([0.,0.,0.]).T
    tmat = tr.composeHomogeneousTransform(tr.Ry(ang),disp)*thok0Tglobal_mat
    # cameraTlaser from thok_cam_calib.py
    x = 0.012
    y = -0.056
    z = 0.035
    r1 = 0.
    r2 = 0.
    r3 = -0.7
    disp = np.matrix([-x,-y,-z]).T
    # reorient axes (-90 deg about z, then +90 deg about y), then apply
    # the small calibration rotations r1/r2/r3 (degrees) from calibration
    r = tr.Rz(math.radians(-90))*tr.Ry(math.radians(90.))
    disp = r*disp
    r = r*tr.Rx(math.radians(r1))
    r = r*tr.Ry(math.radians(r2))
    r = r*tr.Rz(math.radians(r3))
    t = tr.composeHomogeneousTransform(r, disp)
    tmat = t*tmat
    return tmat
## global to utmcam0 coord frame.
# @param p - 3xN np matrix.
# @param ang - servo angle (in RADIANS)
# @param floating_vector - interpretation of p. False -> position vector. True -> floating vector (rotation only).
# @return 3xN np matrix in the new coord frame.
def utmcam0Tglobal(p,ang,floating_vector=False):
    '''Transform 3xN points from the global frame to utmcam0.

    ang -- servo angle in RADIANS.
    floating_vector -- True treats p as direction(s): rotation only,
                       no homogeneous normalization.
    Returns a 3xN np matrix in the utmcam0 frame.
    '''
    t = utmcam0Tglobal_mat(ang)
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_c = t * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        pt = p_c[0:3]/p_c[3]
    else:
        pt = p_c[0:3]
    return pt
## utmcam0 coord frame to global
# @param p - 3xN np matrix.
# @param ang - servo angle (in RADIANS)
# @param floating_vector - interpretation of p. False -> position vector. True -> floating vector (rotation only).
# @return 3xN np matrix in the new coord frame.
def globalTutmcam0(p,ang,floating_vector=False):
    '''Transform 3xN points from the utmcam0 frame to the global frame.

    ang -- servo angle in RADIANS.
    floating_vector -- True treats p as direction(s): rotation only,
                       no homogeneous normalization.
    Returns a 3xN np matrix in the global frame.
    '''
    t = utmcam0Tglobal_mat(ang)
    t = tr.invertHomogeneousTransform(t)
    p_hom = tr.xyzToHomogenous(p, floating_vector)
    p_c = t * p_hom
    # idiomatic truth test instead of '== False'
    if not floating_vector:
        pt = p_c[0:3]/p_c[3]
    else:
        pt = p_c[0:3]
    return pt
| [
[
8,
0,
0.125,
0.0088,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.136,
0.0044,
0,
0.66,
0.0476,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.1404,
0.0044,
0,
0.66,
... | [
"''' Coordinate frames for testing ROS-interface (copied from Cody).\n'''",
"import numpy as np, math",
"import copy",
"import hrl_lib.transforms as tr",
"_globalT = {\n 'torso' : None,\n 'thok0' : None,\n 'utm0' : None,\n 'utmcam0': None,\n 'mecanum': None\n}",
"def create_globalTDict():\n... |
#! /usr/bin/env python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import scanner
#import processor
import configuration
#import hrl_lib.util as ut
#
#import roslib; roslib.load_manifest('laser_camera_segmentation')
#import hrl_hokuyo.hokuyo_scan as hs
#import hrl_hokuyo.hokuyo_scan as hokuyo_scan
#import opencv as cv
#from opencv import highgui
#
#import pylab
#from matplotlib.patches import Rectangle
#
#my_svm = cv.CvSVM()
##print CvSVM::train(const CvMat* _train_data, const CvMat* _responses, const CvMat* _var_idx=0, const CvMat* _sample_idx=0, CvSVMParams _params=CvSVMParams())
#train_data = cv.cvCreateMat(10,2,cv.CV_32FC1) #CvMat* cvCreateMat(int rows, int cols, int type)
#train_data[0][0] = 1
#train_data[1][0] = 2
#train_data[2][0] = 3
#train_data[3][0] = 4
#train_data[4][0] = 5
#train_data[5][0] = 6
#train_data[6][0] = 7
#train_data[7][0] = 8
#train_data[8][0] = 9
#train_data[9][0] = 10
#train_data[0][1] = 1
#train_data[1][1] = 2
#train_data[2][1] = 3
#train_data[3][1] = 4
#train_data[4][1] = 5
#train_data[5][1] = 6
#train_data[6][1] = 7
#train_data[7][1] = 8
#train_data[8][1] = 9
#train_data[9][1] = 10
#
#for i in range(10):
# print train_data[i][0]
# print train_data[i][1]
# print '###'
#
#responses = cv.cvCreateMat(10,1,cv.CV_32FC1)
#responses[0] = 1
#responses[1] = 1
#responses[2] = 1
#responses[3] = 1
#responses[4] = 1
#responses[5] = 0
#responses[6] = 0
#responses[7] = 0
#responses[8] = 0
#responses[9] = 0
#
#
#params = cv.CvSVMParams()
#params.svm_type = cv.CvSVM.C_SVC
## Type of SVM, one of the following types:
## CvSVM::C_SVC - n-class classification (n>=2), allows imperfect separation of classes with penalty multiplier C for outliers.
## CvSVM::NU_SVC - n-class classification with possible imperfect separation. Parameter nu (in the range 0..1, the larger the value, the smoother the decision boundary) is used instead of C.
## CvSVM::ONE_CLASS - one-class SVM. All the training data are from the same class, SVM builds a boundary that separates the class from the rest of the feature space.
## CvSVM::EPS_SVR - regression. The distance between feature vectors from the training set and the fitting hyperplane must be less than p. For outliers the penalty multiplier C is used.
## CvSVM::NU_SVR - regression; nu is used instead of p.
#params.kernel_type = cv.CvSVM.SIGMOID
##CvSVM::LINEAR - no mapping is done, linear discrimination (or regression) is done in the original feature space. It is the fastest option. d(x,y) = x*y == (x,y)
##CvSVM::POLY - polynomial kernel: d(x,y) = (gamma*(x*y)+coef0)degree
##CvSVM::RBF - radial-basis-function kernel; a good choice in most cases: d(x,y) = exp(-gamma*|x-y|2)
##CvSVM::SIGMOID - sigmoid function is used as a kernel: d(x,y) = tanh(gamma*(x*y)+coef0)
#
#print my_svm.train_auto(train_data, responses,None,None,params)
#print my_svm.get_params()
#test = cv.cvCreateMat(1,2,cv.CV_32FC1)
#test[0] = 6
#test[1] = 8.7878
#print my_svm.predict(test)
#
#import matplotlib.pyplot as plt
#import matplotlib.image as mpimg
#import numpy as np
#
#n = 100
#m = 100
#results = np.array(-1*np.ones((n,m)))
#
#for i in range(n):
# for j in range(m):
# test[0]=i
# test[1]=j
# results[i][j] = my_svm.predict(test)
# #print str(i) + ' ' + str(j) + ' ' + ' -> ' + str(results[i][j])
#
##print results
#
#imgplot = plt.imshow(results, cmap=pylab.cm.gray, interpolation='nearest')
##imgplot = plt.imshow(np.array(train_data).transpose())
##imgscatter = plt.scatter(np.array(train_data)[:,0], np.array(train_data)[:,1])
#plt.show()
#
#
#
##pylab.ion() #interactive
##pylab.figure(figsize=(8,4))
##pylab.hold(True)
##pylab.subplot(121)
##pylab.title('test')
##pylab.imshow(responses, cmap=pylab.cm.gray, interpolation='nearest')
##
##pylab.draw()
#
#
#
##cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
###sc = scanner.scanner(cfg)
##pc = processor.processor(cfg)
##
###name = ut.formatted_time()
###sc.capture_and_save(name)
###pc.load_raw_data(name)
##
##id = '2009Sep14_095609'
##pc.load_raw_data(id)
##pc.load_metadata(id)
##print pc.scan_dataset.id
##print pc.scan_dataset.polygons
##pc.create_polygon_images()
##pc.process_raw_data()
###pc.save_mapped_image(name)
##pc.display_all_data()
##
##print pc.scan_dataset.polygons[0].cvImage[400]
#
#
##! /usr/bin/env python
#
#
#
# Capture and save a single calibration scan from the desktop webcam setup;
# the processing/display steps below are left commented out.
cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/calib')
cfg.webcam_id = 0
sc = scanner.scanner(cfg)
##pc = processor.processor(cfg)
##
###name = ut.formatted_time()
sc.capture_and_save('calib')
##
##pc.load_raw_data('2009Oct17_114217')
##pc.process_raw_data()
##pc.display_all_data()
#
#
print 'done'
| [
[
1,
0,
0.1684,
0.0053,
0,
0.66,
0,
802,
0,
1,
0,
0,
802,
0,
0
],
[
1,
0,
0.1789,
0.0053,
0,
0.66,
0.1667,
627,
0,
1,
0,
0,
627,
0,
0
],
[
14,
0,
0.9,
0.0053,
0,
0.... | [
"import scanner",
"import configuration",
"cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/calib')",
"cfg.webcam_id = 0",
"sc = scanner.scanner(cfg)",
"sc.capture_and_save('calib')",
"print('done')"
] |
#!/usr/bin/env python
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
##import roslib; roslib.load_manifest('laser_camera_segmentation')
##import rospy
##from std_msgs.msg import String
import hrl_tilting_hokuyo.processing_3d as p3d
from enthought.mayavi import mlab
import hrl_lib.util as ut
import numpy as np, math #import numpy.core as np??
import scipy
from scipy import stats
##import scanr as scanr
##import nearesNeighbourGather.NearestLaserPoint as NearestLaserPoint
if __name__ == '__main__':
##	print 'test'
##	scanr = scanr.scanr()
##	scanr.verify_laser_cam_callib()
	# Load a recorded tilt-scan, build the 3d point cloud, and highlight
	# the dominant horizontal slab (e.g. a table top) in a mayavi plot.
	dict = ut.load_pickle('../../data/2009Aug31_172113_dict.pkl')
	pts = p3d.generate_pointcloud(dict['pos_list'],dict['scan_list'], math.radians(-60),math.radians(60),dict['l1'],dict['l2'], min_tilt=math.radians(-20),max_tilt=math.radians(20))
	# histogram of z values over 30 bins; the fullest bin is taken as the
	# dominant plane's height band
	hist = scipy.stats.histogram(pts[2],30)
	hist_max_index = hist[0].argmax()
	# hist = (counts, low_range, bin_size, extrapoints) -- recover the
	# z-band covered by the fullest bin
	z_min = hist[1] + hist_max_index * hist[2]
	z_max = z_min + hist[2]
	scalar_list = list()
	for x,y,z in np.asarray(pts.T):
		#scalar_list.append(x)
		# color in-band points with a fixed scalar (29); everything else
		# is colored by its x coordinate
		if z_min < z < z_max:
			scalar_list.append(29)
		else:
			scalar_list.append(x)
	mlab.points3d(pts[0,:].A1,pts[1,:].A1,pts[2,:].A1,scalar_list,mode='point',scale_factor=0.01)#,colormap='winter'
	mlab.colorbar()
	#scipy.stats.histogram(pts[2],30)[0].argmax()
	##mlab.triangular_mesh([[0,0,0]], [[0,1,0]], [[0,1,1]], [(0,1,2)])
	mlab.show()
[
1,
0,
0.4937,
0.0127,
0,
0.66,
0,
307,
0,
1,
0,
0,
307,
0,
0
],
[
1,
0,
0.5063,
0.0127,
0,
0.66,
0.1667,
226,
0,
1,
0,
0,
226,
0,
0
],
[
1,
0,
0.5443,
0.0127,
0,
... | [
"import hrl_tilting_hokuyo.processing_3d as p3d",
"from enthought.mayavi import mlab",
"import hrl_lib.util as ut",
"import numpy as np, math #import numpy.core as np??",
"import scipy",
"from scipy import stats",
"if __name__ == '__main__':\n\t\t\n##\tprint 'test'\n##\tscanr = scanr.scanr()\n##\tscanr.... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import numpy as np,math
class configuration(object):
    '''
    Per-device configuration for the laser/camera scanning hardware.

    Depending on ``device`` ('desktopScanner', 'codyRobot' or 'dummyScanner')
    this sets up the webcam id, the 3x4 camera projection matrix, the optical
    centers and the tilting-Hokuyo (THOK) servo parameters used by the rest
    of the pipeline.
    '''
    def __init__(self, path = '../data/', device = 'desktopScanner'):
        '''
        Set default values for the chosen device.

        path   -- base directory for data files
        device -- hardware profile: 'desktopScanner', 'codyRobot' or
                  'dummyScanner'; any other name only prints an error and
                  leaves the camera/THOK attributes unset
        '''
        self.path = path
        # distance clipping range (meters) applied to captured point clouds
        self.pointcloud_max_dist = 5.0
        self.pointcloud_min_dist = 0.1
        self.device = device
        if device == 'desktopScanner':
            import webcam_config as cc
            self.webcam_id = 1
            #most code from travis scanr-class:
            # Initialize webcam
            self.cam_name = 'DesktopWebcam'
            cp = cc.webcam_parameters[self.cam_name]
            fx = cp['focal_length_x_in_pixels']
            fy = cp['focal_length_y_in_pixels']
            # pinhole projection matrix (no skew)
            self.cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                                           [0, fy, 0, 0],
                                           [0, 0, 1, 0]])
            self.cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
            # cam_vec came from a previous laser_cam_callibration
            #self.cam_vec = np.array([0.8, 0.9, -1.7, 3.1, 0.061, 0.032, -0.035 ])
            #self.cam_vec = np.array([1.2000, 1.2000 , -1.4000 , 3.6000 , 0.0600 , 0.0330 ,-0.0200])
            #self.cam_vec = np.array([0.9000 , 0.8000 , -2.2000 , 3.1000 , 0.0620 , 0.0320, -0.0270 ])
            self.cam_vec = np.array([ 1.8000 , 1.7000 , -2.6000 , 4.7500 , 0.0620 , 0.0320 , -0.0270 ])
            #self.cam_vec = np.array([ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ])
            # per-parameter step sizes used when tuning the calibration vector,
            # names document the vector layout (rotations then translations)
            self.cam_deltas = np.array([0.1, 0.1, 0.1, 0.1, 0.001, 0.001, 0.001 ])
            self.cam_names = ['Ry_0', 'Rz_0', 'Rx_-90', 'Rz_-90', 'dx', 'dy', 'dz']
            import scanr_transforms as trs
            # camera <- laser homogeneous transform built from the calibration vector
            self.camTlaser = trs.camTlaser(self.cam_vec)
            self.scanner_metal_plate_offset = 0.05 #TODO
            # Initialize THOK
            self.thok_l1 = 0
            self.thok_l2 = 0.035
            self.thok_tilt_angles = (math.radians(40.0),math.radians(-40.0))
            self.thok_devname = '/dev/robot/desktopServos'
            self.thok_servonum = 19
            self.thok_hoknum = 0
            self.thok_scan_speed = math.radians(5.0)
        elif device == 'codyRobot':
            import hrl_camera.camera_config as cc
            self.webcam_id = 0
            #values from equilibrium_point_control/lpi.py
            self.cam_name = 'mekabotUTM'
            cp = cc.camera_parameters[self.cam_name]
            fx = cp['focal_length_x_in_pixels']
            fy = cp['focal_length_y_in_pixels']
            self.cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                                           [0, fy, 0, 0],
                                           [0, 0, 1, 0]])
            self.cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
            #self.camTlaser = mcf.utmcam0Tglobal(mcf.globalTthok0(m),self.image_angle)
            # Initialize THOK
            self.thok_l1 = 0
            self.thok_l2 = -0.055
            self.thok_tilt_angles = (math.radians(40.0),math.radians(-40.0))
            self.thok_devname = '/dev/robot/servo0'
            self.thok_servonum = 5
            self.thok_hoknum = 0
            self.thok_scan_speed = math.radians(10.0) #speed=10 in lpi
        elif device == 'dummyScanner': #just for testing/demonstration without dependencies outside of gt-ros-pkgk
            self.webcam_id = 0
            #values from equilibrium_point_control/lpi.py
            self.cam_name = 'dummyUTM'
            import opencv as cv
            #values copied from Cody
            cp = {'calibration_image_width' : 640.0,
                  'calibration_image_height' : 480.0,
                  'focal_length_x_in_pixels' : 362.381,
                  'focal_length_y_in_pixels' : 362.260,
                  'optical_center_x_in_pixels' : 275.630,
                  'optical_center_y_in_pixels' : 267.914,
                  'lens_distortion_radial_1' : -0.270544,
                  'lens_distortion_radial_2' : 0.0530850,
                  'lens_distortion_tangential_1' : 0,
                  'lens_distortion_tangential_2' : 0,
                  'opencv_bayer_pattern' : cv.CV_BayerBG2BGR,
                  'color': True,
                  'uid': 8520228
                  }
            fx = cp['focal_length_x_in_pixels']
            fy = cp['focal_length_y_in_pixels']
            self.cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                                           [0, fy, 0, 0],
                                           [0, 0, 1, 0]])
            self.cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
            #self.camTlaser = mcf.utmcam0Tglobal(mcf.globalTthok0(m),self.image_angle)
            # Initialize THOK
            self.thok_l1 = 0
            self.thok_l2 = -0.055
            self.thok_tilt_angles = (math.radians(40.0),math.radians(-40.0))
            self.thok_devname = '/dev/robot/servo0'
            self.thok_servonum = 5
            self.thok_hoknum = 0
            self.thok_scan_speed = math.radians(10.0) #speed=10 in lpi
        else:
            print 'ERROR: unknown device',device
[
1,
0,
0.189,
0.0061,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
3,
0,
0.6037,
0.7988,
0,
0.66,
1,
627,
0,
1,
0,
0,
186,
0,
16
],
[
8,
1,
0.2195,
0.0183,
1,
0.64,... | [
"import numpy as np,math",
"class configuration(object):\n '''\n classdocs\n '''\n\n\n def __init__(self, path = '../data/', device = 'desktopScanner'):",
" '''\n classdocs\n '''",
" def __init__(self, path = '../data/', device = 'desktopScanner'):\n \n '''\n set d... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from classifier import classifier
import opencv as cv
import hrl_lib.util as ut
import numpy as np
from hrl_lib.util import getTime
import os
import processor
import ransac
class boosted_tree_classifier(classifier) :
    '''
    Decision-tree classifier (OpenCV CvDTree) for labeling scan points as
    surface vs. clutter.  Despite the class name, boosting (CvBoost) is
    currently disabled because save/load only works with single decision
    trees here (see the WARNING printed in train()).
    '''
    # the trained OpenCV classifier; populated by train() or load()
    cv_classifier = None
    #def __init__(selfparams):
    #    '''
    #    Constructor
    #    '''
    def create_train_datastructures(self):
        '''
        Collect features from all training datasets and pack them into the
        OpenCV matrices the tree learner expects.

        Per dataset, the two classes are rebalanced by duplicating random
        samples of the minority class so surface and clutter counts match.

        Returns (train_data, train_labels, type_mask) — CvMat of shape
        (n_samples, n_features), CvMat of labels, and a per-column variable
        type mask (all numerical except the categorical response column).
        '''
        #loop through all marked datasets
        self.processor.scan_dataset = self.processor.scans_database.get_dataset(0)
        training_set_size = 0
        data = []
        #get size of training set in total
        while False != self.processor.scan_dataset:
            if self.processor.scan_dataset.is_training_set:
                filename = self.processor.get_features_filename(True)
                print 'loading', filename
                dict = ut.load_pickle(filename)
                # make an equal size of points for each class: use object labels more often:
                difference = np.sum(dict['labels'] == processor.LABEL_SURFACE) - np.sum(dict['labels'] == processor.LABEL_CLUTTER)
                #print getTime(), filename
                #print getTime(), 'surface',np.sum(dict['labels'] == LABEL_SURFACE)
                #print getTime(), 'clutter',np.sum(dict['labels'] == LABEL_CLUTTER)
                #print getTime(), difference, "difference = np.sum(dict['labels'] == LABEL_SURFACE) - np.sum(dict['labels'] == LABEL_CLUTTER)"
                #print getTime(), ''
                if difference > 0:
                    # more surface than clutter: oversample clutter points
                    clutter_features = (dict['features'])[np.nonzero(dict['labels'] == processor.LABEL_CLUTTER)]
                    if len(clutter_features) > 0: #if there are none, do nothin'
                        dict['set_size'] += difference
                        dict['features'] = np.vstack((dict['features'], clutter_features[np.random.randint(0,len(clutter_features),size=difference)]))
                        dict['labels'] = np.hstack((dict['labels'], np.ones(difference) * processor.LABEL_CLUTTER))
                elif difference < 0:
                    # more clutter than surface: oversample surface points
                    surface_features = (dict['features'])[np.nonzero(dict['labels'] == processor.LABEL_SURFACE)]
                    if len(surface_features) > 0: #if there are none, do nothin'
                        difference = -difference
                        dict['set_size'] += difference
                        dict['features'] = np.vstack((dict['features'], surface_features[np.random.randint(0,len(surface_features),size=difference)]))
                        dict['labels'] = np.hstack((dict['labels'], np.ones(difference) * processor.LABEL_SURFACE))
                training_set_size += dict['set_size']
                data.append(dict)
            #get next one
            self.processor.scan_dataset = self.processor.scans_database.get_next_dataset()
            #print getTime(), self.scan_dataset
        #create training set:
        self.processor.scan_dataset = self.processor.scans_database.get_dataset(0)
        current_training_set_index = 0
        feature_vector_length = len(self.processor.features.get_indexvector(self.features))
        print getTime(), feature_vector_length
        #create dataset matrices:
        print getTime(), '#training set size ', training_set_size
        #deactivate for now:
        max_traning_size = 1800000#2040000
        #if training_set_size < max_traning_size:
        if True:
            # copy every (surface/clutter) feature vector into the CvMat
            train_data = cv.cvCreateMat(training_set_size,feature_vector_length,cv.CV_32FC1) #CvMat* cvCreateMat(int rows, int cols, int type)
            train_labels = cv.cvCreateMat(training_set_size,1,cv.CV_32FC1)
            for dict in data:
                for index in range(dict['set_size']):
                    #only train on surface and clutter
                    if dict['labels'][index] == processor.LABEL_SURFACE or dict['labels'][index]== processor.LABEL_CLUTTER:
                        #print getTime(), point3d
                        #print getTime(), 'fvindexv',self.get_features_indexvector(features)
                        #print getTime(), 'len', len(self.get_features_indexvector(features))
                        fv = (dict['features'][index])[self.processor.features.get_indexvector(self.features)]
                        #print getTime(), 'fv',fv
                        #print getTime(), np.shape(fv)
                        for fv_index, fv_value in enumerate(fv):
                            train_data[current_training_set_index][fv_index] = fv_value
                        train_labels[current_training_set_index] = dict['labels'][index]
                        # for fv_index, fv_value in enumerate(fv):
                        #     print getTime(), train_data[current_training_set_index][fv_index]
                        # print getTime(), '##',train_labels[current_training_set_index],'##'
                        #print getTime(), 'fv ', fv
                        #print getTime(), 'tr ',train_data[index]
                        current_training_set_index = current_training_set_index + 1
                        #if current_training_set_index % 4096 == 0:
                        #    print getTime(), 'label', dict['labels'][index], 'fv', fv
                        if current_training_set_index % 16384 == 0:
                            print getTime(), 'reading features:', current_training_set_index, 'of', training_set_size, '(',(float(current_training_set_index)/float(training_set_size)*100.0),'%)'
        else:
            # dead branch (guarded by 'if True' above): random subsampling
            # path for training sets larger than max_traning_size
            print getTime(), 'more than',max_traning_size,'features, sample from them...'
            #select 2040000 features:
            all_data = []
            all_labels = []
            for dict in data:
                for index in range(dict['set_size']):
                    if dict['labels'][index] == processor.LABEL_SURFACE or dict['labels'][index]== processor.LABEL_CLUTTER:
                        fv = (dict['features'][index])[self.processor.features.get_indexvector(self.features)]
                        all_data += [fv]
                        all_labels += [dict['labels'][index]]
                        current_training_set_index = current_training_set_index + 1
                        if current_training_set_index % 16384 == 0:
                            print getTime(), 'reading features:', current_training_set_index, 'of', training_set_size, '(',(float(current_training_set_index)/float(training_set_size)*100.0),'%)'
            del data
            import random
            indices = np.array(random.sample(xrange(len(all_labels)),max_traning_size))
            all_data = np.asarray(all_data)
            all_labels = np.asarray(all_labels)
            all_data = all_data[indices]
            all_labels = all_labels[indices]
            train_data = cv.cvCreateMat(max_traning_size,feature_vector_length,cv.CV_32FC1) #CvMat* cvCreateMat(int rows, int cols, int type)
            train_labels = cv.cvCreateMat(max_traning_size,1,cv.CV_32FC1)
            for index in range(max_traning_size):
                for fv_index, fv_value in enumerate(all_data[index]):
                    train_data[index][fv_index] = fv_value
                train_labels[index] = all_labels[index]
                if index % 16384 == 0:
                    print getTime(), 'setting features:', (float(index)/float(max_traning_size))
        print getTime(), 'start training Classifier'
        # all feature columns are numerical; the response (last) column is categorical
        type_mask = cv.cvCreateMat(1, feature_vector_length+1, cv.CV_8UC1)
        cv.cvSet( type_mask, cv.CV_VAR_NUMERICAL, 0)
        type_mask[feature_vector_length] = cv.CV_VAR_CATEGORICAL
        return (train_data, train_labels, type_mask)
    def train(self):
        '''Assemble the training matrices and fit the OpenCV decision tree.'''
        #cv_boost_params = cv.CvBoostParams()
        #priors = cv.cvCreateMat(1,2,cv.CV_32FC1)
        #priors[0] = 10
        #priors[1] = 1
        #cv_boost_params.max_categories = 2
        #cv_boost_params.priors = priors #TODO: activate them
        self.cv_classifier = cv.CvDTree() #cv.CvBoost()
        train_datastructures = self.create_train_datastructures()
        (train_data, train_labels, type_mask) = train_datastructures
        print 'WARNING! use CvDTree (single decision trees) for now as load/save works!'#'boost'
        print getTime(), self.cv_classifier.train(train_data, cv.CV_ROW_SAMPLE, train_labels, None, None, type_mask )
        print getTime(), 'traning finished'
        #self.release_train_datastructures(train_datastructures)
    #unused - is that necessary in python? how does it work??
    def release_train_datastructures(self, train_datastructures):
        '''Free the CvMat objects returned by create_train_datastructures().'''
        if None != train_datastructures:
            (train_data, train_labels, type_mask) = train_datastructures
            cv.cvReleaseMat(train_data)
            cv.cvReleaseMat(train_labels)
            cv.cvReleaseMat(type_mask)
    #test on current scan:
    def test(self, feature_data = None):
        '''
        Classify every feature vector of the current scan.

        feature_data -- optional pre-loaded feature dict; when None the
                        features are loaded from get_features_filename()

        Returns (labels, test_results_tuple); on an empty dataset it returns
        (labels, 1, 1, 1) instead.  Also stores the dict and labels in
        self.test_feature_dict / self.test_labels for test_postprocess().
        '''
        #test on current scan:
        print getTime(), 'test on:', self.processor.scan_dataset.id
        if feature_data == None:
            filename = self.processor.get_features_filename()
            print 'loading', filename
            dict = ut.load_pickle(filename)
        else:
            dict = feature_data
        #print getTime(), dict
        current_set_size = dict['set_size']
        feature_vector_length = len(self.processor.features.get_indexvector(self.features))
        print getTime(), feature_vector_length
        labels = np.array(np.zeros(len(self.processor.map_polys)))
        print 'test: length of labels vector:', len(labels)
        test = cv.cvCreateMat(1,feature_vector_length,cv.CV_32FC1)
        if current_set_size == 0:
            print getTime(), 'ERROR: test dataset is empty!'
            return labels, 1, 1, 1
        count = 0
        for index in dict['point_indices']:
            fv = (dict['features'][count])[self.processor.features.get_indexvector(self.features)]
            #print getTime(), fv, dict['features'][count]
            # reuse one 1-row CvMat as the prediction buffer
            for fv_index, fv_value in enumerate(fv):
                test[fv_index] = fv_value
            #print 'class',self.cv_classifier
            label = self.cv_classifier.predict(test)
            #print label.value
            labels[index] = label.value
            #print 'tdone'
            if count % 4096 == 0:
                print getTime(), 'testing:', count, 'of', current_set_size, '(',(float(count)/float(current_set_size)*100.0),'%)'
            count += 1
        #save for later use for postprocessing:
        self.test_feature_dict = dict
        self.test_labels = labels
        #cv.cvReleaseMat(test)
        return labels, self.test_results(dict, labels)
    #test() has to be called before to create intermediate results!
    def test_postprocess(self):
        '''Run the RANSAC plane postprocessing on the labels from test().'''
        labels = self.postprocess(self.test_labels)
        return labels, self.test_results(self.test_feature_dict, labels)
    def postprocess(self, labels):
        '''
        Fit a plane (RANSAC) through the surface-labeled points and relabel
        surface points that are off-plane as clutter.  Mutates and returns
        ``labels``.
        '''
        debug = False
        model = ransac.PlaneLeastSquaresModel(debug)
        data_idx = np.where(np.asarray(labels) == processor.LABEL_SURFACE)[0]
        data = np.asarray(self.processor.pts3d_bound).T[data_idx]
        n, _ = np.shape(data)
        # fewer iterations for small point sets
        if n < 5000:
            k = 700
        else:
            k = 2000
        # run RANSAC algorithm
        ransac_fit, ransac_data = ransac.ransac(data,model,
                                        3, k, 0.04, len(data_idx)/2.5, # misc. parameters
                                        debug=debug,return_all=True)
        print 'ransac: model',ransac_fit
        print 'ransac:',ransac_data
        print 'len inlier',len(ransac_data['inliers']),'shape pts',np.shape(self.processor.pts3d_bound)
        #labels[data_idx[ransac_data['inliers']]] = processor.LABEL_CLUTTER #all non-plane pts
        # boolean mask of surface-labeled points that are NOT plane inliers
        fancy = np.zeros(len(np.asarray(labels))).astype(bool)
        fancy[data_idx] = True
        fancy[data_idx[ransac_data['inliers']]] = False
        labels[fancy] = processor.LABEL_CLUTTER #all surface-labeled non-plane pts
        #DEBUG:
        #from enthought.mayavi import mlab
        #inliers = np.asarray(self.processor.pts3d_bound).T[data_idx[ransac_data['inliers']]].T
        #mlab.points3d(inliers[0,:],inliers[1,:],inliers[2,:],inliers[0,:],mode='sphere',resolution=8,scale_factor=0.0015,scale_mode='none',scale_factor=0.01,colormap='jet')#,colormap='winter'
        #mlab.quiver3d([ransac_fit[0][0]], [ransac_fit[0][1]], [ransac_fit[0][2]], [ransac_fit[1][0]], [ransac_fit[1][1]], [ransac_fit[1][2]], scale_factor=0.4, color=(1,0,0))
        return labels
    def save(self):
        '''Persist the trained tree to the XML file from get_filename().'''
        classifier_filename = self.get_filename()
        #if file doesn't exist: create it
        # NOTE(review): the handle from open() is never closed explicitly;
        # relies on garbage collection to release it
        if False == os.path.isfile(classifier_filename):
            open(classifier_filename,'w')
        self.cv_classifier.save(classifier_filename)
    def load(self):
        '''Load a previously saved tree from the file named by get_filename().'''
        self.cv_classifier = cv.CvDTree() #cv.CvBoost()
        print getTime(), 'loading Classifier',self.features
        self.cv_classifier.load(self.get_filename())
| [
[
1,
0,
0.094,
0.0031,
0,
0.66,
0,
71,
0,
1,
0,
0,
71,
0,
0
],
[
1,
0,
0.1003,
0.0031,
0,
0.66,
0.125,
437,
0,
1,
0,
0,
437,
0,
0
],
[
1,
0,
0.1034,
0.0031,
0,
0.66... | [
"from classifier import classifier",
"import opencv as cv",
"import hrl_lib.util as ut",
"import numpy as np",
"from hrl_lib.util import getTime",
"import os",
"import processor",
"import ransac",
"class boosted_tree_classifier(classifier) :\n '''\n classdocs\n '''\n\n cv_classifier = No... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Hai Nguyen (Healthcare Robotics Lab, Georgia Tech.)
import numpy as np
def height_distribution(points, center=None, radius = 1.0):
    '''
    Return the mean 3D position of a 3xN point cloud (mean over axis 1).

    points -- 3xN numpy array/matrix; rows are x, y, z (asserted)
    center, radius -- currently unused; presumably intended to restrict the
                      statistic to points within ``radius`` of ``center``
                      (TODO: implement or remove)

    Fix: the original computed np.mean(...) and discarded the result, so the
    function always returned None; the mean is now returned.
    '''
    assert(points.shape[0] == 3)
    return np.mean(points, axis=1)
| [
[
1,
0,
0.8571,
0.0286,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.9429,
0.0857,
0,
0.66,
1,
629,
0,
3,
0,
0,
0,
0,
1
],
[
8,
1,
0.9714,
0.0286,
1,
0.39,
... | [
"import numpy as np",
"def height_distribution(points, center=None, radius = 1.0):\n assert(points.shape[0] == 3)\n np.mean(points, axis=1)",
" np.mean(points, axis=1)"
] |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from laser_camera_segmentation.srv._Segmentation import *
import sensor_msgs
import numpy as np
import opencv as cv
import hrl_lib.util as ut
import opencv.highgui as highgui
from labeling.label_object import label_object
def convert_pointcloud_to_ROS(pts3d, intensities = None, labels = None):
    '''
    Pack a 3xN point matrix (plus optional per-point scalars) into a ROS
    sensor_msgs PointCloud message.

    pts3d       -- 3xN matrix/array of points; transposed and iterated row-wise
    intensities -- optional sequence, copied into channel 'intensities'
    labels      -- optional sequence, copied into channel 'labels'

    Both channels are always present in the message; they are simply left
    empty when the corresponding argument is None.

    Fix: use 'is not None' instead of '!= None' — callers pass numpy arrays
    (see convert_ROS_pointcloud_to_pointcloud), and '!= None' on a numpy
    array compares element-wise, which is ambiguous in a boolean context.
    '''
    ROS_pointcloud = sensor_msgs.msg.PointCloud()
    ROS_pointcloud.points = []
    for point in np.asarray(pts3d.T):
        ROS_pointcloud.points += [geometry_msgs.msg.Point32(point[0],point[1],point[2])]
    intensity_channel = sensor_msgs.msg.ChannelFloat32()
    intensity_channel.name = 'intensities'
    if intensities is not None:
        for value in intensities:
            intensity_channel.values += [value]
    label_channel = sensor_msgs.msg.ChannelFloat32()
    label_channel.name = 'labels'
    if labels is not None:
        for value in labels:
            label_channel.values += [value]
    ROS_pointcloud.channels = [intensity_channel, label_channel]
    return ROS_pointcloud
def convert_ROS_pointcloud_to_pointcloud(ROS_pointcloud):
    '''
    Unpack a ROS PointCloud message into numpy arrays.

    Returns (pts3d, intensities, labels): pts3d is a 3xN array of point
    coordinates; intensities and labels come from channels[0] and
    channels[1] of the message, respectively.
    '''
    coords = [[p.x, p.y, p.z] for p in ROS_pointcloud.points]
    pts3d = np.array(coords).T
    #print pts3d
    intensities = np.array(list(ROS_pointcloud.channels[0].values))
    labels = np.array(list(ROS_pointcloud.channels[1].values))
    return pts3d, intensities, labels
def convert_cvimage_to_ROS(image):
    '''Flatten an OpenCV image into a 1-D array suitable for a ROS message.'''
    # clone first so the caller's image buffer is left untouched
    clone = cv.cvCloneImage(image)
    as_np = ut.cv2np(clone,format='BGR')
    return np.reshape(as_np,(1,-1))[0]
def convert_ROS_image_to_cvimage(ROS_image, width, height):
    '''Rebuild a (height, width, 3) OpenCV image from a flat ROS byte sequence.'''
    flat = np.array(ROS_image, dtype='uint8')
    return ut.np2cv(np.reshape(flat,(height,width, 3)))
#convert label_object to ROS geometry_msgs.msg.Polygon
def convert_polygon_to_ROS(polygon):
    '''Convert a label_object polygon into a ROS geometry_msgs Polygon (z set to 0).'''
    ros_poly = geometry_msgs.msg.Polygon()
    ros_poly.points = [geometry_msgs.msg.Point32(pt[0], pt[1], 0.)
                       for pt in polygon.get_points()]
    return ros_poly
def convert_ROS_polygon_to_polygon(ROS_polygon):
    '''Convert a ROS Polygon message back into a label_object (z is dropped).'''
    result = label_object()
    for vertex in ROS_polygon.points:
        result.add_point((vertex.x,vertex.y))
    return result
| [
[
1,
0,
0.2703,
0.009,
0,
0.66,
0,
322,
0,
1,
0,
0,
322,
0,
0
],
[
1,
0,
0.2793,
0.009,
0,
0.66,
0.0833,
61,
0,
1,
0,
0,
61,
0,
0
],
[
1,
0,
0.2973,
0.009,
0,
0.66,... | [
"from laser_camera_segmentation.srv._Segmentation import *",
"import sensor_msgs",
"import numpy as np",
"import opencv as cv",
"import hrl_lib.util as ut",
"import opencv.highgui as highgui",
"from labeling.label_object import label_object",
"def convert_pointcloud_to_ROS(pts3d, intensities = None, l... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from hrl_lib.util import getTime
import processor
class classifier(object):
    '''
    Abstract base class for point-cloud surface/clutter classifiers.

    Subclasses override train()/test()/save()/load(); test_results()
    provides the shared accuracy-reporting logic.
    '''
    # set by __init__; declared here as class attributes (legacy style)
    processor = None
    features = 'all'
    def __init__(self, processor, features):
        '''
        Constructor

        processor -- owning processor object (provides config and features)
        features  -- name of the feature subset to classify on (e.g. 'all')
        '''
        self.processor = processor
        self.features = features
    def train(self):
        #abstract: subclasses fit their model here
        return None
    #abstract
    def test(self, feature_data = None):
        return None
    #dict are the loaded features including the ground truth, labels the algorithm output
    def test_results(self, dict, labels):
        '''
        Tally per-class accuracy of ``labels`` against the ground truth in
        ``dict`` and print a summary.

        Returns (count_surface, count_clutter, count_surface_correct,
        count_clutter_correct, percent_surface_correct,
        percent_clutter_correct).
        '''
        current_set_size = dict['set_size']
        count_correct = 0
        count_clutter_correct = 0
        count_surface_correct = 0
        count_clutter = 0
        count_surface = 0
        count = 0
        for index in dict['point_indices']:
            label = labels[index]
            if label == dict['labels'][count]:
                count_correct += 1
            if dict['labels'][count] == processor.LABEL_CLUTTER:
                count_clutter += 1
                if label == dict['labels'][count]:
                    count_clutter_correct += 1
            if dict['labels'][count] == processor.LABEL_SURFACE:
                count_surface += 1
                if label == dict['labels'][count]:
                    count_surface_correct += 1
            count += 1
        print getTime(), '##########################################'
        print getTime(), '####tested on ', self.features, '###########################'
        print getTime(), '==================================='
        print getTime(), 'percent in total: surface:',(float(count_surface)/float(current_set_size)*100), '%, clutter:',(float(count_clutter)/float(current_set_size)*100),'%'
        print getTime(), '#points surface:',count_surface,'clutter:',count_clutter
        print getTime(), '#points correct: surface:',count_surface_correct,'clutter:',count_clutter_correct
        # report 100% for a class with zero ground-truth points (avoids /0)
        if count_surface > 0:
            percent_surface_correct = float(count_surface_correct)/float(count_surface) * 100
        else:
            percent_surface_correct = 100
        if count_clutter > 0:
            percent_clutter_correct = float(count_clutter_correct)/float(count_clutter) * 100
        else:
            percent_clutter_correct = 100
        print getTime(), '#percent correct: surface:',percent_surface_correct,'clutter:',percent_clutter_correct
        print getTime(), '==================================='
        print getTime(), '##########################################'
        testresults = (count_surface, count_clutter,count_surface_correct, count_clutter_correct, percent_surface_correct, percent_clutter_correct)
        return testresults
    def get_filename(self):
        '''Build the classifier XML path from config path, feature set and feature params.'''
        return self.processor.config.path+'/classifier_'+self.features+'_'+self.processor.feature_type+'_k'+str(self.processor.feature_neighborhood)+'_r'+str(self.processor.feature_radius)+'.XML'
    def save(self):
        #abstract: subclasses persist their model here
        return None
    def load(self):
        #abstract: subclasses restore their model here
        return None
| [
[
1,
0,
0.265,
0.0085,
0,
0.66,
0,
775,
0,
1,
0,
0,
775,
0,
0
],
[
1,
0,
0.2821,
0.0085,
0,
0.66,
0.5,
177,
0,
1,
0,
0,
177,
0,
0
],
[
3,
0,
0.641,
0.6923,
0,
0.66,... | [
"from hrl_lib.util import getTime",
"import processor",
"class classifier(object):\n '''\n classdocs\n '''\n\n processor = None\n features = 'all'",
" '''\n classdocs\n '''",
" processor = None",
" features = 'all'",
" def __init__(self, processor, features):\n ''... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Hai Nguyen (Healthcare Robotics Lab, Georgia Tech.)
import hrl_lib.util as ut
import numpy as np
import opencv as cv
import opencv.highgui as hg
##
# calculates eigen values of covariance matrix accumulating statistics of sobel filter responses in an image block
#
# @param cv_image opencv image to calculate texture over
# @param blocksize size of block to accumulate statistics (in pixels)
# @param filtersize size of sobel filter to use (in pixels)
# @return numpy matrix of size (width, height, 2) where [:,:,0] is the set of first eigen values and [:,:,1] is the second set
def eigen_texture(cv_image, blocksize=8, filtersize=3):
    '''
    Return per-pixel eigenvalues of the gradient covariance matrix.

    Converts cv_image to grayscale, runs cvCornerEigenValsAndVecs with the
    given block/filter sizes and returns a (height, width, 2) numpy array
    holding the two eigenvalues per pixel (the eigenvectors are discarded).
    '''
    w, h = cv_image.width, cv_image.height
    gray = cv.cvCreateImage(cv.cvSize(w, h), cv.IPL_DEPTH_8U, 1)
    # cvCornerEigenValsAndVecs packs 6 floats per pixel: l1, l2, x1, y1, x2, y2
    packed = cv.cvCreateImage(cv.cvSize(w*6, h), cv.IPL_DEPTH_32F, 1)
    cv.cvCvtColor(cv_image, gray, cv.CV_BGR2GRAY)
    cv.cvCornerEigenValsAndVecs(gray, packed, blocksize, filtersize)
    arr = np.reshape(ut.cv2np(packed), [h, w, 6])
    return arr[:,:,0:2]
def visualize(eigens):
    '''
    Show both eigenvalue planes in two HighGUI windows.

    Each plane is min-max normalized, inverted and scaled to 0-255.
    Blocks until the user presses space (return) or x (exit the process).
    '''
    def as_display(plane):
        # normalize to [0,1], invert, scale to uint8; guard a flat plane
        lo = np.min(plane)
        span = np.max(plane) - lo
        if span == 0:
            span = 1
        return ut.np2cv(np.array( (1 - ((plane-lo) / span)) * 255, dtype='uint8'))
    l1cv = as_display(eigens[:,:,0])
    l2cv = as_display(eigens[:,:,1])
    hg.cvNamedWindow('eigen value 1', 1)
    hg.cvNamedWindow('eigen value 2', 1)
    hg.cvShowImage('eigen value 1', l1cv)
    hg.cvShowImage('eigen value 2', l2cv)
    while True:
        key = hg.cvWaitKey(33)
        if key == ' ':
            return
        if key == 'x':
            exit()
if __name__ == '__main__':
    #import pdb
    #hg.cvNamedWindow('win', 1)
    # demo: sweep the texture block size over an example image and show the
    # resulting eigenvalue maps (hard-coded local path — adjust before running)
    im = hg.cvLoadImage('/home/haidai/svn/robot1/src/projects/08_03_dog_commands/dragonfly_color_calibration/untitled folder/camera_image.png')
    #hg.cvShowImage('win', im)
    for i in range(40):
        s = (i+1) * 2  # block sizes 2, 4, ..., 80
        print s
        eig_tex_np = eigen_texture(im, blocksize=s, filtersize=3)
        visualize(eig_tex_np)  # blocks until space/x is pressed
    # pdb.set_trace()
# def texture_features(self, block_size=5, filter_size=3):
# """
# Calculates the texture features associated with the image.
# block_size gives the size of the texture neighborhood to be processed
# filter_size gives the size of the Sobel operator used to find gradient information
# """
# #block_size = cv.cvSize(block_size, block_size)
#
# #convert to grayscale float
# channels = 1
# self.gray_image = cv.cvCreateImage(cv.cvSize(self.im_width, self.im_height),
# cv.IPL_DEPTH_8U, #cv.IPL_DEPTH_16U, #cv.IPL_DEPTH_32F,
# channels)
#
#
# #cv.CV_32FC1, #cv.IPL_DEPTH_32F, #cv.IPL_DEPTH_8U, #cv.IPL_DEPTH_16U,
# channels = 1
# eig_tex = cv.cvCreateImage(cv.cvSize(self.im_width*6, self.im_height),
# cv.IPL_DEPTH_32F,
# channels)
#
#
# cv.cvCvtColor(self.image, self.gray_image, cv.CV_BGR2GRAY);
#
# #cv.cvAdd(const CvArr* src1, const CvArr* src2, CvArr* dst, const CvArr* mask=NULL );
#
# #highgui.cvConvertImage(self.image, self.gray_image)
#
# cv.cvCornerEigenValsAndVecs(self.gray_image, eig_tex,#CvArr* eigenvv,
# block_size, filter_size)
#
# eig_tex = ut.cv2np(eig_tex)
# eig_tex = np.reshape(eig_tex, [self.im_height, self.im_width, 6])
# #print eig_tex.shape ## [480,640,3]
# ## (l1, l2, x1, y1, x2, y2), where
# ## l1, l2 - eigenvalues of M; not sorted
# ## (x1, y1) - eigenvector corresponding to l1
# ## (x2, y2) - eigenvector corresponding to l2
# tex_feat = np.zeros([3, self.im_height * self.im_width], dtype=np.float32)
# tmp = np.reshape(eig_tex, [self.im_height * self.im_width, 6]).T
# s = tmp[0] > tmp[1]
# tex_feat[1:3, s] = tmp[0, s] * tmp[2:4, s]
# tex_feat[0, s] = tmp[1, s]
# tex_feat[1:3, -s] = tmp[1, -s] * tmp[4:6, -s]
# tex_feat[0, -s] = tmp[0, -s]
#
# self.tex_feat = tex_feat.T
# self.tex_image = np.reshape(self.tex_feat, [self.im_height, self.im_width, 3])
| [
[
1,
0,
0.1975,
0.0064,
0,
0.66,
0,
775,
0,
1,
0,
0,
775,
0,
0
],
[
1,
0,
0.2038,
0.0064,
0,
0.66,
0.1667,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.2102,
0.0064,
0,
... | [
"import hrl_lib.util as ut",
"import numpy as np",
"import opencv as cv",
"import opencv.highgui as hg",
"def eigen_texture(cv_image, blocksize=8, filtersize=3):\n gray_image = cv.cvCreateImage(cv.cvSize(cv_image.width, cv_image.height), cv.IPL_DEPTH_8U, 1)\n eig_tex = cv.cvCreateImage(cv.cvSize(cv_im... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Travis Deyle (Healthcare Robotics Lab, Georgia Tech.)
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
if __name__ == '__main__':
    # Interactive laser<->camera extrinsic calibration: load a saved
    # image + tilting-Hokuyo scan, project the point cloud into the image
    # with an adjustable camera-from-laser transform, and let the user
    # tune the 7 transform parameters via laser_cam_callib's UI.
    import scanr_transforms as trs
    import transforms as tr
    import numpy as np,math
    import webcam_config as cc
    import opencv as cv
    import opencv.highgui as hg
    import hrl_lib.util as ut
    import tilting_hokuyo.processing_3d as p3d
    import laser_cam_callib
    import pygame
    # Older seed sets kept for reference:
    #seeds = np.array([1.0, 0.9, -1.7, 3.1, 0.061, 0.032, -0.027 ])
    #seeds = np.array([0.0, 0.0, -0.0, 0.0, 0.061, 0.032, -0.027 ])
    #seeds = np.array([0.8, 0.9, -1.7, 3.1, 0.061, 0.032, -0.035 ])
    #seeds = np.array([2.4000 , 3.8000 , -2.9000 , 5.5000 , 0.0600 , 0.0300 , -0.0430 ])
    # seeds = np.array([2.2000 , 2.0000 , -2.8000 , 5.5000 , 0.0500 , 0.0300 , -0.0430 ])
    # 2.0000 2.0000 -2.8000 5.5000 0.0550 0.0300 x -0.0400
    #seeds = np.array([0.9000 , 0.8000 , -2.2000 , 3.1000 , 0.0620 , 0.0320, -0.0270 ])
    # Initial guess for [Ry_0, Rz_0, Rx_-90, Rz_-90, dx, dy, dz] and the
    # per-keypress adjustment step for each parameter.
    seeds = np.array([ 1.8000 , 1.7000 , -2.6000 , 4.7500 , 0.0620 , 0.0320 , -0.0270 ])
    deltas = np.array([0.1, 0.1, 0.1, 0.1, 0.001, 0.001, 0.001 ])
    #-1.0000 x 1.7000 -2.2000 6.4000 -0.0200 0.0300 -0.0430
    names = ['Ry_0', 'Rz_0', 'Rx_-90', 'Rz_-90', 'dx', 'dy', 'dz']
    # Build the camera-from-laser homogeneous transform from the 7 params
    # (4 rotation offsets in degrees, 3 translation offsets in meters).
    # NOTE(review): the default res=np.zeros(6) has only 6 elements but
    # res[6] is indexed below -- calling this with no argument would raise;
    # apparently it is always called with a full 7-vector. Also note this
    # local definition is shadowed by trs.camTlaser on the next line and is
    # therefore unused -- confirm which one is intended.
    def camTlaser( res = np.zeros(6) ):
        rot = tr.Ry( math.radians( 0.0 + res[0] )) * tr.Rz( math.radians( 0.0 + res[1] )) * tr.Rx( math.radians( -90.0 + res[2] )) * tr.Rz( math.radians( -90.0 + res[3]))
        disp = np.matrix([ res[4], res[5], res[6] ]).T + np.matrix([ 0.0, 0.0, 0.0 ]).T
        return tr.composeHomogeneousTransform(rot, disp)
    cameraTlaser = trs.camTlaser
    # Camera intrinsics for the projection (3x4 pinhole matrix + center).
    cp = cc.webcam_parameters['DesktopWebcam']
    fx = cp['focal_length_x_in_pixels']
    fy = cp['focal_length_y_in_pixels']
    cam_proj_mat = np.matrix([[fx, 0, 0, 0],
                              [0, fy, 0, 0],
                              [0, 0, 1, 0]])
    cam_centers = ( cp['optical_center_x_in_pixels'], cp['optical_center_y_in_pixels'] )
    #take image and scan
    import scanner
    import configuration
    #id = '2009Nov04_144041'
    id = '2009Nov04_135319'
    cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/labeling')
    img = hg.cvLoadImage(cfg.path + '/data/' + id + '_image.png')
    thok_dict = ut.load_pickle(cfg.path + '/data/' + id + '_laserscans.pkl')
    #cfg = configuration.configuration('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/calib')
    #cfg.webcam_id = 0
    #sc = scanner.scanner(cfg)
    #sc.capture_and_save('calib', False)
    #img = hg.cvLoadImage('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/calib/data/calib_image.png')
    #thok_dict = ut.load_pickle('/home/martin/robot1_data/usr/martin/laser_camera_segmentation/calib/data/calib_laserscans.pkl')
    # Convert tilt poses + scans into a 3D point cloud for projection.
    poses, scans = thok_dict['laserscans'][0]
    points_cloud_laser = p3d.generate_pointcloud(poses, scans, math.radians(-180), math.radians(180),
                                                 0, .035, max_dist=5.0, min_dist=.1)
    # Run the interactive calibration loop until the UI signals done.
    c = laser_cam_callib.Callib(cameraTlaser, seeds, deltas, names, points_cloud_laser, img, cam_proj_mat, cam_centers,1, id)
    while not c.reDraw():
        tmp = 1
    pygame.quit()
| [
[
1,
0,
0.2768,
0.0089,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.2768,
0.0089,
0,
0.66,
0.5,
630,
3,
1,
0,
0,
0,
0,
1
],
[
4,
0,
0.6429,
0.7054,
0,
0.66,... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"if __name__ == '__main__':\n import scanr_transforms as trs\n import transforms as tr\n import numpy as np,math\n import webcam_config as cc\n import opencv as ... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
from features import features
import laser_camera_segmentation.gaussian_curvature as gaussian_curvature
import scipy.stats as stats
import numpy as np
import opencv as cv
import scipy.spatial.kdtree as kdtree
import hrl_lib.util as ut
from hrl_lib.util import getTime
import os
import laser_camera_segmentation.texture_features as texture_features
import processor
import copy
class gaussian_histogram_features(features):
    '''
    Per-point feature generator for labeled laser/camera scans.

    Combines, per 3D point: local surface geometry (plane normal and
    eigenvalue spread from gaussian_curvature over a kd-tree
    neighborhood), a z-height histogram over a cubic volume of interest
    (VOI), per-z-slice hue/saturation/intensity histograms, HSV color
    histograms over the neighborhood, laser intensity histograms, and
    two texture-eigenvalue channels. Reads image, bounded point cloud,
    laser-to-image mapping, and config from self.processor (set up by
    the base class -- TODO confirm against features base).
    '''
    #all_save_load: set to true only if nonzero_indices contain all pts in pt-cloud!
    def prepare(self, features_k_nearest_neighbors, nonzero_indices = None, all_save_load = False, regenerate_neightborhood_indices = False):
        '''Precompute everything get_featurevector() needs.

        Builds the kd-tree neighborhood index lists (radius ball queries,
        or kNN when features_k_nearest_neighbors is a number), the HSV
        channel images, the texture eigenvalue images, and the VOI
        histograms around the processor's point of interest.

        features_k_nearest_neighbors -- None/False for radius queries,
            otherwise k for k-nearest-neighbor queries (experimental path).
        nonzero_indices -- optional subset of point indices to query for.
        all_save_load -- cache/load the neighborhood index lists on disk;
            only valid when nonzero_indices covers all points (see note).
        regenerate_neightborhood_indices -- force recomputation of cache.
        '''
        #print np.shape(self.processor.pts3d_bound), 'shape pts3d_bound'
        imgTmp = cv.cvCloneImage(self.processor.img)
        self.imNP = ut.cv2np(imgTmp,format='BGR')
        self.processor.map2d = np.asarray(self.processor.map[0][0:2]) #copied from laser to image mapping
        if features_k_nearest_neighbors == None or features_k_nearest_neighbors == False: #use range
            self.kdtree2d = kdtree.KDTree(self.processor.pts3d_bound.T)
            #print len(nonzero_indices)
            #print np.shape(np.asarray((self.processor.pts3d_bound.T)[nonzero_indices]))
            if nonzero_indices != None:
                print getTime(), 'query ball tree for ', len(nonzero_indices), 'points'
                kdtree_query = kdtree.KDTree((self.processor.pts3d_bound.T)[nonzero_indices])
            else:
                print getTime(), 'query ball tree'
                kdtree_query = kdtree.KDTree(self.processor.pts3d_bound.T)
            # Cache file keyed on dataset id and query radius.
            filename = self.processor.config.path+'/data/'+self.processor.scan_dataset.id+'_sphere_neighborhood_indices_'+str(self.processor.feature_radius)+'.pkl'
            if all_save_load == True and os.path.exists(filename) and regenerate_neightborhood_indices == False:
                #if its already there, load it:
                print getTime(), 'loading',filename
                self.kdtree_queried_indices = ut.load_pickle(filename)
            else:
                # eps=2.0, p=0.2 -> approximate query for speed (see scipy docs).
                self.kdtree_queried_indices = kdtree_query.query_ball_tree(self.kdtree2d, self.processor.feature_radius, 2.0, 0.2) #approximate
                print getTime(), 'queried kdtree: ',len(self.kdtree_queried_indices),'points, radius:',self.processor.feature_radius
                if all_save_load == True:
                    ut.save_pickle(self.kdtree_queried_indices, filename)
            #make dict out of list for faster operations? (doesn't seem to change speed significantly):
            #self.kdtree_queried_indices = dict(zip(xrange(len(self.kdtree_queried_indices)), self.kdtree_queried_indices))
        else: #experiemental: use_20_nearest_neighbors == True
            #TODO: exclude invalid values in get_featurevector (uncomment code there)
            self.kdtree2d = kdtree.KDTree(self.processor.pts3d_bound.T)
            self.kdtree_queried_indices = []
            print getTime(), 'kdtree single queries for kNN start, k=', features_k_nearest_neighbors
            count = 0
            for point in ((self.processor.pts3d_bound.T)[nonzero_indices]):
                count = count + 1
                result = self.kdtree2d.query(point, features_k_nearest_neighbors,0.2,2,self.processor.feature_radius)
                #existing = result[0][0] != np.Inf
                #print existing
                #print result[1]
                self.kdtree_queried_indices += [result[1]] #[existing]
                if count % 4096 == 0:
                    print getTime(),count
            print getTime(), 'kdtree singe queries end'
            #convert to numpy array -> faster access
            self.kdtree_queried_indices = np.asarray(self.kdtree_queried_indices)
        #print self.kdtree_queried_indices
        #takes long to compute:
        #avg_len = 0
        #minlen = 999999
        #maxlen = 0
        #for x in self.kdtree_queried_indices:
        #    avg_len += len(x)
        #    minlen = min(minlen, len(x))
        #    maxlen = max(maxlen, len(x))
        #avg_len = avg_len / len(self.kdtree_queried_indices)
        #print getTime(), "range neighbors: avg_len", avg_len, 'minlen', minlen, 'maxlen', maxlen
        #create HSV numpy images:
        # compute the hsv version of the image
        image_size = cv.cvGetSize(self.processor.img)
        img_h = cv.cvCreateImage (image_size, 8, 1)
        img_s = cv.cvCreateImage (image_size, 8, 1)
        img_v = cv.cvCreateImage (image_size, 8, 1)
        img_hsv = cv.cvCreateImage (image_size, 8, 3)
        cv.cvCvtColor (self.processor.img, img_hsv, cv.CV_BGR2HSV)
        cv.cvSplit (img_hsv, img_h, img_s, img_v, None)
        self.imNP_h = ut.cv2np(img_h)
        self.imNP_s = ut.cv2np(img_s)
        self.imNP_v = ut.cv2np(img_v)
        # Two texture channels: first/second eigenvalues of the Sobel
        # covariance matrix (see texture_features.eigen_texture).
        textures = texture_features.eigen_texture(self.processor.img)
        self.imNP_tex1 = textures[:,:,0]
        self.imNP_tex2 = textures[:,:,1]
        self.debug_before_first_featurevector = True
        self.generate_voi_histogram(self.processor.point_of_interest,self.processor.voi_width)
    #has to have at least length 2 because of openCV matrices!!!!
    def get_indexvector(self, type):
        '''Return the feature-vector column indices for a feature subset.

        type -- 'range' (geometry/z-hist block), 'color' (color/intensity
            block), 'hsvi' (trailing current-color block, for plotting),
            anything else selects all columns.
        '''
        var_idx = []
        #start indices
        rh1 = 0 #zhist, normal, eigenvalue1, ev2
        ch1 = rh1 + 6 #hsi zhist, maxheight-diff, tex1, tex2
        ci = ch1 + 25
        end = ci + 4 #
        if type=='range':
            for i in range(rh1, ch1):
                var_idx.append(i)
        elif type=='color':
            for i in range(ch1, end):
                var_idx.append(i)
        #for plotting:
        elif type=='hsvi':
            for i in range(ci,end):
                var_idx.append(i)
        else: #all
            for i in range(rh1, end):
                var_idx.append(i)
        return np.array(var_idx)
    #get the feature vector for a specific point
    def get_featurevector(self, index, count, pts = None):
        '''Assemble the feature vector for one point.

        index -- column index of the point in pts / pts3d_bound.
        count -- row into the precomputed kdtree_queried_indices (i.e. the
            position of this point in the queried subset).
        pts -- optional 3xN point matrix; defaults to pts3d_bound.
        Returns a flat python list of floats (see get_indexvector for the
        layout of the blocks).
        '''
        if pts == None:
            pts = self.processor.pts3d_bound
        #print 'i',index,'c', count
        fv = []
        indices = np.asarray(self.kdtree_queried_indices[count])
        # kNN queries pad missing neighbors with index == N; drop those.
        invalid_value = np.shape(pts)[1]
        #print indices
        #print 'iv',invalid_value
        indices = indices[indices != invalid_value]
        #print getTime(), indices
        #print getTime(), 'number of pts', len(indices)
        a = pts[:,indices]
        # Rotate so the ground plane normal aligns with -x before the
        # curvature fit -- presumably to make the normal sign consistent.
        view = processor.rotate_to_plane(self.processor.scan_dataset.ground_plane_normal, np.matrix([-1,0,0.]).T)
        normal, eigenvalues = gaussian_curvature.gaussian_curvature(a,view)
        #eigenvalues = eigenvalues / np.square(r)
        #fv += [normal[0,0],0,normal[2,0]]
        #fv += normal.T.A[0].tolist()
        #fv += eigenvalues.tolist()
        #print np.asarray(pts[:,index].T[0])[0]
        # print 'pt',np.asarray(pts[:,index].T[0])
        point = pts[:,index]
        ev1, ev2 = self.get_voi_histogram_spread(point)
        #z_max_height_diff = pts[2,index] - self.get_voi_maxcount_height()
        #fv += [self.get_voi_histogram_value(point),z_max_height_diff,normal[0,0],normal[1,0],normal[2,0], ev1, ev2]
        # Geometry block: z-hist value, normal (3), slice spread (2).
        fv += [self.get_voi_histogram_value(point),normal[0,0],normal[1,0],normal[2,0], ev1, ev2]
        # Per-slice h/s/i histogram values at this point's color/intensity.
        h = self.imNP_h[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        s = self.imNP_s[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        i = self.processor.intensities_bound[index]
        hsi = self.get_voi_hsi_histogram_values(point,h,s,i)
        fv += [hsi[0],hsi[1],hsi[2]]
        #print np.shape(self.imNP_tex1)
        #print np.shape(self.map2d)
        tex1 = self.imNP_tex1[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        tex2 = self.imNP_tex2[self.processor.map2d[1,index],self.processor.map2d[0,index]]
        fv += [tex1, tex2]
        #print tex1, tex2
        #color histograms:
        colors_h = []
        colors_s = []
        colors_v = []
        for idx in indices:
            colors_h.append(float(self.imNP_h[self.processor.map2d[1,idx],self.processor.map2d[0,idx]]))
            colors_s.append(float(self.imNP_s[self.processor.map2d[1,idx],self.processor.map2d[0,idx]]))
            colors_v.append(float(self.imNP_v[self.processor.map2d[1,idx],self.processor.map2d[0,idx]]))
        # Normalized 5-bin histograms over the neighborhood, one per channel.
        color_hist = stats.histogram2(np.array(colors_h), [0,51,102,153,204])
        color_hist = color_hist / float(np.sum(color_hist))
        color_hist = list(color_hist)
        fv += color_hist
        color_hist = stats.histogram2(np.array(colors_s), [0,51,102,153,204])
        color_hist = color_hist / float(np.sum(color_hist))
        color_hist = list(color_hist)
        fv += color_hist
        color_hist = stats.histogram2(np.array(colors_v), [0,51,102,153,204])
        color_hist = color_hist / float(np.sum(color_hist))
        color_hist = list(color_hist)
        fv += color_hist
        #intensities
        intensities = self.processor.intensities_bound[indices]
        intensities = np.asarray(intensities)
        #map to 0-255-range: TODO: perhaps do some nonlinear transformation here?
        intensities = intensities / 10000 * 255
        intensity_hist = stats.histogram2(intensities, [0,51,102,153,204])
        intensity_hist = intensity_hist / float(np.sum(intensity_hist))
        intensity_hist = list(intensity_hist)
        fv += intensity_hist
        #current colors:
        fv += [float(self.imNP_h[self.processor.map2d[1,index],self.processor.map2d[0,index]]) / 255.0]
        fv += [float(self.imNP_s[self.processor.map2d[1,index],self.processor.map2d[0,index]]) / 255.0]
        fv += [float(self.imNP_v[self.processor.map2d[1,index],self.processor.map2d[0,index]]) / 255.0]
        #current intensity value (scaled)
        intensity = self.processor.intensities_bound[index]
        #scale:
        intensity = intensity / 15000.0
        intensity = [intensity]
        fv += intensity
        if self.debug_before_first_featurevector == True:
            self.debug_before_first_featurevector = False
            print getTime(), 'feature vector sample(gaussian histograms):', fv
        return fv
    #cube of interest around point
    def generate_voi_histogram(self, poi, width):
        '''Build all VOI statistics around point-of-interest poi.

        Computes the z-height histogram over [0, 2) m in 80 bins, per-slice
        point-spread eigenvalues, and per-slice 5-bin h/s/i histograms.
        Side effects only: fills self.z_hist*, self.voi_*,
        self.intensity_normalization_factor and self.hsi_hist_bincount.
        '''
        print 'poi',poi,'width',width
        pts_indices = self.get_voi_pts_indices(poi, width)
        self.voi_pts_indices = pts_indices
        pts = np.asarray(self.processor.pts3d_bound)
        pts = pts[:,pts_indices]
        self.voi_pts = pts
        #mlab.points3d(pts[0,:],pts[1,:],pts[2,:], mode='point')
        #mlab.show()
        # NOTE(review): 'min'/'max' shadow the builtins within this method.
        min = 0.
        max = 2.
        self.voi_bincount = 80
        self.voi_interval_size = max - min
        bins = np.asarray(range(self.voi_bincount)) * self.voi_interval_size/float(self.voi_bincount)
        #print 'bins',bins
        hist = stats.histogram2(pts[2],bins) / float(len(pts[2]))
        #print 'zhist',hist
        #print zip(bins, hist)
        self.z_hist = hist
        self.z_hist_bins = bins
        slices = self.get_voi_slice_indices()
        self.z_hist_slices_indices = slices
        #precalculate spread values:
        self.z_hist_spread = []
        for indices in self.z_hist_slices_indices:
            a = self.processor.pts3d_bound[:,indices]
            u, ev12 = gaussian_curvature.spread(a)
            self.z_hist_spread += [(ev12[0], ev12[1])]
        #create h,s histograms:
        pts_h = []
        pts_s = []
        #print self.processor.pts3d_bound
        n,m = np.shape(np.asarray(self.processor.pts3d_bound))
        #print 'm',m,'len(self.processor.pts3d_bound[2,:].A1)',len(self.processor.pts3d_bound[2,:].A1)
        for index in range(m):
            pts_h.append(float(self.imNP_h[self.processor.map2d[1,index],self.processor.map2d[0,index]]))
        for index in range(m):
            pts_s.append(float(self.imNP_s[self.processor.map2d[1,index],self.processor.map2d[0,index]]))
        pts_i = np.asarray(self.processor.intensities_bound)
        #print 'ptsi',pts_i
        # Scale intensities so the maximum maps to 255 (guard against an
        # all-zero scan).
        if np.max(pts_i) > 0:
            self.intensity_normalization_factor = 1.0 / float(np.max(pts_i)) * 255
        else:
            self.intensity_normalization_factor = 1.
        #print 'self.intensity_normalization_factor', self.intensity_normalization_factor
        #print pts_i
        pts_i *= self.intensity_normalization_factor
        pts_h = np.asarray(pts_h)
        pts_s = np.asarray(pts_s)
        self.z_hist_h_hists = []
        self.z_hist_s_hists = []
        self.z_hist_i_hists = []
        #normalize by maximum slice:
        max_count = 0
        max_count_index = 0
        for count_idx, indices in enumerate(slices):
            n = np.shape(indices)
            if n[0] > max_count:
                max_count = n[0]
                max_count_index = count_idx
        # Height (bin center) of the most populated z-slice.
        slize_height = (self.voi_interval_size / float(self.voi_bincount))
        self.z_hist_height_max = slize_height * (max_count_index + 0.5)
        #print 'max_count', max_count,'index',max_count_index, 'height in max bin', self.z_hist_height_max
        for indices in slices:
            pts_h_slice = pts_h[indices]
            pts_s_slice = pts_s[indices]
            pts_i_slice = pts_i[indices]
            self.hsi_hist_bincount = 5
            bins = np.asarray(range(0,self.hsi_hist_bincount))*float(255.0/float(self.hsi_hist_bincount))
            #print bins
            #todo: smooth with kernel fct
            count = float(len(pts_h_slice))
            if count == 0:
                count = 1
            hist_h = stats.histogram2(pts_h_slice,bins) / count
            self.z_hist_h_hists.append(hist_h)
            hist_s = stats.histogram2(pts_s_slice,bins) / count
            self.z_hist_s_hists.append(hist_s)
            hist_i = stats.histogram2(pts_i_slice,bins) / count
            #print 'hist_i', hist_i, pts_i_slice, bins, pts_i
            self.z_hist_i_hists.append(hist_i)
        #print 'hh',self.z_hist_h_hists
        #print 'sh',self.z_hist_s_hists
        #print 'ih',self.z_hist_i_hists
    def get_voi_pts_indices(self, poi, width):
        '''Return indices of points inside the axis-aligned cube of side
        `width` centered at poi.'''
        pts = np.asarray(self.processor.pts3d_bound)
        #region of interest:
        conditions = np.multiply(np.multiply(np.multiply(np.multiply(np.multiply(pts[0] < poi[0]+width/2.0, pts[0] > poi[0]-width/2.0),
                        pts[1] < poi[1]+width/2.0), pts[1] > poi[1]-width/2.0),
                        pts[2] < poi[2]+width/2.0), pts[2] > poi[2]-width/2.0)
        indices = np.where(conditions)[0]
        return indices
    def get_voi_slice_indices(self):
        '''Partition the VOI points into horizontal slices, one per z-hist
        bin edge; returns a list of index arrays (one per bin).'''
        slices = []
        last_z = -999999
        for z in self.z_hist_bins:
            indices = copy.copy(self.voi_pts_indices)
            pts = self.voi_pts
            conditions = np.multiply(pts[2] < z, pts[2] > last_z)
            indices = indices[np.where(conditions)[0]]
            slices += [indices]
            last_z = z
        return slices
    def get_voi_histogram_value(self, point):
        '''Return the normalized z-histogram value for point's height,
        or 0 if the height falls outside the histogram range.'''
        z = point[2]
        z = int(z*self.voi_bincount / float(self.voi_interval_size))
        if z >= 0 and z < self.voi_bincount:
            # print z, self.z_hist[z]
            return self.z_hist[z]
        else:
            #print z,0
            return 0
    def get_voi_histogram_spread(self, point):
        '''Return the precomputed (ev1, ev2) spread eigenvalues of the
        z-slice containing point, or (0, 0) outside the range.'''
        z = point[2]
        z = int(z*self.voi_bincount / float(self.voi_interval_size))
        if z >= 0 and z < self.voi_bincount:
            # Sanity check kept for reference (recomputes the spread):
            # indices = self.z_hist_slices_indices[z]
            # a = self.processor.pts3d_bound[:,indices]
            # u, ev12 = gaussian_curvature.spread(a)
            # if abs(self.z_hist_spread[z][0] - ev12[0]) > 0.0000000001 or abs(self.z_hist_spread[z][1] - ev12[1]) > 0.0000000001:
            #     print 'ERROR', self.z_hist_spread[z], '!=', (ev12[0], ev12[1])
            # return ev12[0], ev12[1]
            return self.z_hist_spread[z]
        else:
            #print z,0
            return 0, 0
    def get_voi_hsi_histogram_values(self, point,h ,s, i):
        '''Look up the slice h/s/i histogram values for a point's hue h,
        saturation s and (raw) intensity i; (0,0,0) outside the range.'''
        z = point[2]
        z = int(z*self.voi_bincount / float(self.voi_interval_size))
        if z >= 0 and z < self.voi_bincount:
            h_index = int(h * self.hsi_hist_bincount / 255.0)
            s_index = int(s * self.hsi_hist_bincount / 255.0)
            # Intensity uses the same normalization as the histograms did.
            i *= self.intensity_normalization_factor
            i_index = int(i * self.hsi_hist_bincount / 255.0)
            h_hist = self.z_hist_h_hists[z][h_index]
            s_hist = self.z_hist_s_hists[z][s_index]
            #print 'z',z,'i_index',i_index, i
            #print self.z_hist_i_hists, np.shape(self.z_hist_i_hists)
            i_hist = self.z_hist_i_hists[z][i_index]
            return h_hist, s_hist, i_hist
        else:
            #print z,0
            return 0, 0, 0
    def get_voi_maxcount_height(self):
        '''Return the height (bin center) of the most populated z-slice.'''
        return self.z_hist_height_max
| [
[
1,
0,
0.0691,
0.0023,
0,
0.66,
0,
479,
0,
1,
0,
0,
479,
0,
0
],
[
1,
0,
0.0737,
0.0023,
0,
0.66,
0.0833,
82,
0,
1,
0,
0,
82,
0,
0
],
[
1,
0,
0.076,
0.0023,
0,
0.6... | [
"from features import features",
"import laser_camera_segmentation.gaussian_curvature as gaussian_curvature",
"import scipy.stats as stats",
"import numpy as np",
"import opencv as cv",
"import scipy.spatial.kdtree as kdtree",
"import hrl_lib.util as ut",
"from hrl_lib.util import getTime",
"import ... |
#
# Copyright (c) 2010, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Martin Schuster (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('laser_camera_segmentation')
import hrl_tilting_hokuyo.tilt_hokuyo_servo as ths
#import hokuyo.hokuyo_processing as hp
import hrl_hokuyo.hokuyo_scan as hs
import sys
from labeling import label_object, scan_dataset, scans_database
from opencv import highgui
import hrl_lib.util as ut
class scanner:
    '''Captures webcam images and tilting-Hokuyo laser scans and saves
    them (plus optional metadata) for the labeling pipeline.

    Hardware handles are created lazily by init_webcam()/init_thok();
    the attributes start out as False and double as "not initialized"
    flags.
    '''
    def __init__(self, configuration):
        # configuration: project config object providing device names,
        # servo/laser parameters and the output path.
        self.config = configuration
        self.webcam = False   # camera handle, created on first use
        self.thok = False     # tilting hokuyo handle, created on first use
        self.img = False      # last captured frame
    #use the actual hardware
    def init_webcam(self):
        '''Lazily create the camera handle for the configured device.'''
        if self.webcam:
            return
        if self.config.device == 'codyRobot':
            import camera
            self.webcam = camera.Camera(self.config.cam_name)
        else:
            import webcamera
            self.webcam = webcamera.Webcamera(self.config.cam_name, self.config.webcam_id)
        # Old auto-probing fallback, kept for reference:
        # img = False
        # cam_index = -1
        # while not(img) and cam_index < 20:
        #     cam_index = cam_index + 1
        #     try:
        #         del self.webcam
        #         del webcamera
        #         import webcamera
        #         self.webcam = webcamera.Webcamera('DesktopWebcam', cam_index)
        #         img = self.webcam.get_frame()
        #     except:
        #         print "Unexpected error:", sys.exc_info()[0]
        #         print "try again...with next webcam" + str(cam_index)
        #         pass
        # if not(img):
        #     print 'ERROR: Webcam init FAILED'
        # return
    #use the actual hardware
    def init_thok(self):
        '''Lazily create the Hokuyo and its tilting-servo wrapper.'''
        if self.thok:
            return
        print "Init THOK"
        self.hok = hs.Hokuyo('utm',self.config.thok_hoknum,flip=False)
        self.thok = ths.tilt_hokuyo(self.config.thok_devname,self.config.thok_servonum,self.hok,l1=self.config.thok_l1,l2=self.config.thok_l2)
        print "Init THOK done"
    def capture_image(self):
        '''Grab a fresh frame, retrying up to 20 times on failure.

        Returns the captured image (or False if every attempt failed).
        '''
        self.init_webcam()
        del self.img
        self.img = False
        count = 0
        print 'capture image...'
        while not(self.img) and count < 20:
            count = count + 1
            try:
                #call get_frame several times to really get a new picture(!)
                for i in xrange(10):
                    self.img = self.webcam.get_frame()
            except:
                # NOTE(review): bare except deliberately retries on any
                # camera error; failures are only logged.
                print "Unexpected error:", sys.exc_info()[0]
                print "try again..."
                pass
        print 'count:'+str(count)
        return self.img
    def capture_laserscan(self, number_of_scans = 1, angle = None):
        '''Run one or more tilt sweeps; returns a list of
        (pos_list, scan_list) tuples (also kept in self.laserscans).

        angle -- optional offset added to both configured tilt limits.
        '''
        self.init_thok()
        self.laserscans = []
        if angle != None:
            tilt_angles = (self.config.thok_tilt_angles[0] + angle, self.config.thok_tilt_angles[1] + angle)
        else:
            tilt_angles = self.config.thok_tilt_angles
        for i in range(number_of_scans):
            pos_list,scan_list = self.thok.scan(tilt_angles,speed=self.config.thok_scan_speed,save_scan=False)
            self.laserscans.append((pos_list,scan_list))
            #print scan_list[0]
        return self.laserscans
    def save_data(self,name, metadata=True, angle = None):
        '''Write <name>_laserscans.pkl and <name>_image.png under
        config.path/data, optionally registering the pair in the scans
        database. Returns name.
        '''
        # NOTE(review): 'dict' shadows the builtin in this method.
        dict = {'laserscans' : self.laserscans,
                'l1': self.config.thok_l1, 'l2': self.config.thok_l2,
                'image_angle' : angle}
        prefix = self.config.path+'/data/'+name
        print "Saving: "+prefix+'_laserscans.pkl'
        ut.save_pickle(dict,prefix+'_laserscans.pkl')
        print "Saving: "+prefix+'_image.png'
        highgui.cvSaveImage(prefix+'_image.png',self.img)
        if metadata:
            # save metadata to database:
            database = scans_database.scans_database()
            database.load(self.config.path,'database.pkl')
            dataset = scan_dataset.scan_dataset()
            dataset.id = name
            dataset.scan_filename = 'data/'+name+'_laserscans.pkl'
            dataset.image_filename = 'data/'+name+'_image.png'
            database.add_dataset(dataset)
            database.save()
        return name
    def take_artag_image(self):
        '''Capture and return a frame for AR-tag processing.'''
        img = self.capture_image()
        return img
    def save_artag_image(self,name):
        '''Save the last captured frame as <name>_artag_image.png;
        returns the path relative to config.path.'''
        filename = self.config.path+'/data/'+name+'_artag_image.png'
        print "Saving: "+filename
        highgui.cvSaveImage(filename,self.img)
        return '/data/'+name+'_artag_image.png'
    #capture image and laserscans, save image, scan
    def capture_and_save(self,name, metadata=True, angle = None):
        '''One-shot helper: init hardware, optionally move the tilt servo
        to angle, capture image + one scan, then save everything.'''
        self.init_webcam()
        self.init_thok()
        if None != angle:
            self.thok.servo.move_angle(angle)
            # Read back the true angle actually reached by the servo.
            angle = self.thok.servo.read_angle()
        self.capture_image()
        self.capture_laserscan(1, angle)
        return self.save_data(name, metadata, angle)
[
1,
0,
0.1639,
0.0055,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.1639,
0.0055,
0,
0.66,
0.125,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.1694,
0.0055,
0,
0.6... | [
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import roslib; roslib.load_manifest('laser_camera_segmentation')",
"import hrl_tilting_hokuyo.tilt_hokuyo_servo as ths",
"import hrl_hokuyo.hokuyo_scan as hs",
"import sys",
"from labeling import label_object, scan_dataset, scans_databas... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# \author Advait Jain (Healthcare Robotics Lab, Georgia Tech.)
import roslib; roslib.load_manifest('UI_segment_object')
import rospy
from UI_segment_object.srv import GetPt
from UI_segment_object.srv import None_Bool
reset_ui = None
get_3d_point = None
def initialize_service():
    """Block until both UI services exist, then bind the module proxies.

    Populates the module-level globals reset_ui (None_Bool) and
    get_3d_point (GetPt).
    """
    global reset_ui, get_3d_point
    reset_srv_name = 'UI_reset'
    srv_name = 'get_3D_pt'
    # Wait for each service in turn, logging as we go.
    for service_name in (reset_srv_name, srv_name):
        rospy.loginfo('waiting for service: %s'%service_name)
        rospy.wait_for_service(service_name)
    rospy.loginfo('Done')
    reset_ui = rospy.ServiceProxy(reset_srv_name, None_Bool)
    get_3d_point = rospy.ServiceProxy(srv_name, GetPt)
def get_point():
    """Reset the UI, wait for the user's click, and return its 3D point.

    Returns an (x, y, z) tuple from the get_3D_pt service response.
    Requires initialize_service() to have been called first.
    """
    global reset_ui, get_3d_point
    reset_ui()
    response = get_3d_point()
    point = response.pt
    return point.x, point.y, point.z
if __name__ == '__main__':
    # Demo: connect to the UI services and print one clicked 3D point.
    rospy.init_node('point_and_click_client')
    initialize_service()
    x,y,z = get_point()
    print '3D point:', x, y, z
| [
[
1,
0,
0.4348,
0.0145,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.4348,
0.0145,
0,
0.66,
0.1111,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.4638,
0.0145,
0,
0.... | [
"import roslib; roslib.load_manifest('UI_segment_object')",
"import roslib; roslib.load_manifest('UI_segment_object')",
"import rospy",
"from UI_segment_object.srv import GetPt",
"from UI_segment_object.srv import None_Bool",
"reset_ui = None",
"get_3d_point = None",
"def initialize_service():\n gl... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
# Plot tangential force vs. mechanism configuration for one logged door
# opening trial, comparing three force sources: the wrist force estimate,
# forces reconstructed from joint torques, and the ATI force/torque sensor.
import matplotlib.pyplot as pp
import numpy as np
import roslib; roslib.load_manifest('epc_door_opening')
import hrl_lib.util as ut
import doors_forces_kinematics.arm_trajectories_ram as atr

# Logged trial to analyze (alternate trials kept for convenience).
d = ut.load_pickle('pkls/ikea_cabinet_log.pkl')
#d = ut.load_pickle('pkls/ikea_cabinet_2.pkl')
#d = ut.load_pickle('pkls/lab_cabinet_log.pkl')
typ = 'rotary'   # mechanism type passed to the hindsight computation
pr2_log = True   # this log was recorded on the PR2

# The hindsight routine reads d['f_list']; swap in one force source at a
# time and recompute the (configuration, tangential force) trajectory.
d['f_list'] = d['f_list_estimate']
h_config, h_ftan_estimate = atr.force_trajectory_in_hindsight(d, typ, pr2_log)
pp.plot(np.degrees(h_config), h_ftan_estimate, 'ro-', mew=0, ms=0,
        label='estimate')
# Torque-derived forces are optional in older logs.
if 'f_list_torques' in d:
    d['f_list'] = d['f_list_torques']
    h_config, h_ftan_torques = atr.force_trajectory_in_hindsight(d, typ,
                                                                 pr2_log)
    pp.plot(np.degrees(h_config), h_ftan_torques, 'go-', mew=0, ms=0,
            label='torques')
d['f_list'] = d['f_list_ati']
h_config, h_ftan_ati = atr.force_trajectory_in_hindsight(d, typ, pr2_log)
pp.plot(np.degrees(h_config), h_ftan_ati, 'bo-', mew=0, ms=0,
        label='ATI')
pp.legend()
pp.show()
| [
[
1,
0,
0.4545,
0.0152,
0,
0.66,
0,
596,
0,
1,
0,
0,
596,
0,
0
],
[
1,
0,
0.4697,
0.0152,
0,
0.66,
0.0588,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.5,
0.0152,
0,
0.6... | [
"import matplotlib.pyplot as pp",
"import numpy as np",
"import roslib; roslib.load_manifest('epc_door_opening')",
"import roslib; roslib.load_manifest('epc_door_opening')",
"import hrl_lib.util as ut",
"import doors_forces_kinematics.arm_trajectories_ram as atr",
"d = ut.load_pickle('pkls/ikea_cabinet_... |
#!/usr/bin/env python
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import roslib; roslib.load_manifest('epc_door_opening')
import rospy
from epc_door_opening.msg import MechanismKinematicsRot
from geometry_msgs.msg import Point32
import doors_forces_kinematics.arm_trajectories as at
from threading import RLock
import numpy as np
import time
## Fit a circle to the mechanism trajectory and publish the estimated
# kinematics (circle center and radius) of the rotary mechanism.
# @param cartesian_pts_list - list of 3-tuples. trajectory of the mechanism
# @param pbshr - publisher for the MechanismKinematics message
# @param lock - guards cartesian_pts_list (a ROS callback appends to it).
def circle_estimator(cartesian_pts_list, pbshr, lock):
    # Snapshot the shared trajectory under the lock.
    with lock:
        n_pts = len(cartesian_pts_list)
        pts_2d = (np.matrix(cartesian_pts_list).T)[0:2, :]

    # Not enough data yet: wait a bit and publish nothing.
    if n_pts < 2:
        time.sleep(0.1)
        return

    first_pt = pts_2d[:, 0]
    latest_pt = pts_2d[:, -1]

    # Default guess: a huge circle far to the right, i.e. an almost
    # straight pull. Used until the handle has moved enough to fit.
    mk = MechanismKinematicsRot()
    mk.cx = 0.5
    mk.cy = -3.5
    mk.cz = cartesian_pts_list[0][2]
    mk.rad = 10.

    # Below 15 cm of motion the circle fit is ill-conditioned; publish
    # the default guess instead of a fitted one.
    if np.linalg.norm(first_pt - latest_pt) <= 0.15:
        time.sleep(0.1)
        pbshr.publish(mk)
        return

    # Drop the first point and fit; seed the optimizer with a 1.1 m
    # radius circle directly below the starting position.
    seed_rad = 1.1
    rad, cx, cy = at.fit_circle(seed_rad, first_pt[0, 0],
                                first_pt[1, 0] - seed_rad,
                                pts_2d[:, 1:], method='fmin_bfgs',
                                verbose=False, rad_fix=False)
    mk.cx = cx
    mk.cy = cy
    mk.rad = rad
    pbshr.publish(mk)
# append the point to the trajectory
def trajectory_cb(pt32, tup):
    """ROS callback: append the incoming Point32 to the shared list.

    tup is (trajectory_list, lock); the lock keeps appends safe with
    respect to the estimator loop reading the list.
    """
    traj, traj_lock = tup
    with traj_lock:
        traj.append([pt32.x, pt32.y, pt32.z])
if __name__ == '__main__':
    # Shared trajectory buffer: filled by the subscriber callback,
    # consumed by circle_estimator in the loop below.
    cartesian_points_list = []
    lock = RLock()
    rospy.init_node('kinematics_estimator_least_sq')
    mech_kin_pub = rospy.Publisher('mechanism_kinematics_rot',
                                   MechanismKinematicsRot)
    rospy.Subscriber('mechanism_trajectory', Point32, trajectory_cb,
                     (cartesian_points_list, lock))
    rospy.logout('Begin')
    # Continuously re-fit the circle as new trajectory points arrive.
    while not rospy.is_shutdown():
        circle_estimator(cartesian_points_list, mech_kin_pub, lock)
        rospy.sleep(0.01)
    rospy.logout('End')
| [
[
1,
0,
0.2541,
0.0082,
0,
0.66,
0,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.2541,
0.0082,
0,
0.66,
0.0909,
630,
3,
1,
0,
0,
0,
0,
1
],
[
1,
0,
0.2623,
0.0082,
0,
0.... | [
"import roslib; roslib.load_manifest('epc_door_opening')",
"import roslib; roslib.load_manifest('epc_door_opening')",
"import rospy",
"from epc_door_opening.msg import MechanismKinematicsRot",
"from geometry_msgs.msg import Point32",
"import doors_forces_kinematics.arm_trajectories as at",
"from threadi... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import numpy as np, math
import copy
from threading import RLock
import roslib; roslib.load_manifest('epc_door_opening')
import rospy
from epc_door_opening.msg import MechanismKinematicsRot
from epc_door_opening.msg import MechanismKinematicsJac
from epc_door_opening.msg import ForceTrajectory
from geometry_msgs.msg import Point32
from std_msgs.msg import Empty
import epc_core.epc as epc
import hrl_lib.util as ut
class Door_EPC(epc.EPC):
    """Equilibrium Point Control behaviors for hooking onto and pulling
    open rotary mechanisms (doors, cabinets) and drawers.

    Subscribes to an externally estimated mechanism kinematics model
    (circle center + radius) and servos the commanded equilibrium point
    (CEP) along the tangent of the estimated arc while regulating the
    radial force with a simple proportional controller.
    """
    def __init__(self, robot):
        epc.EPC.__init__(self, robot)
        self.mech_kinematics_lock = RLock()
        self.fit_circle_lock = RLock()
        rospy.Subscriber('mechanism_kinematics_rot',
                         MechanismKinematicsRot,
                         self.mechanism_kinematics_rot_cb)
        rospy.Subscriber('epc/stop', Empty, self.stop_cb)
        # used in the ROS stop_cb and equi_pt_generator_control_radial_force
        self.force_traj_pub = rospy.Publisher('epc/force_test', ForceTrajectory)
        self.mech_traj_pub = rospy.Publisher('mechanism_trajectory', Point32)

    def init_log(self):
        """Reset all per-trial logs and the outgoing ForceTrajectory msg."""
        self.f_list = []
        self.f_list_ati = []
        self.f_list_estimate = []
        self.f_list_torques = []
        self.cep_list = []
        self.ee_list = []
        self.ft = ForceTrajectory()
        if self.mechanism_type != '':
            self.ft.type = self.mechanism_type
        else:
            self.ft.type = 'rotary'

    def log_state(self, arm):
        """Append current forces / CEP / end-effector position to the logs.

        Also detects the moment pulling actually starts (x-force above
        10 N for two consecutive calls); logs are wiped until then.
        """
        # only logging the right arm.
        f = self.robot.get_wrist_force_ati(arm, base_frame=True)
        self.f_list_ati.append(f.A1.tolist())
        f = self.robot.get_wrist_force_estimate(arm, base_frame=True)
        self.f_list_estimate.append(f.A1.tolist())
        f = self.robot.get_force_from_torques(arm)
        self.f_list_torques.append(f.A1.tolist())
        f = self.robot.get_wrist_force(arm, base_frame=True)
        self.f_list.append(f.A1.tolist())
        cep, _ = self.robot.get_cep_jtt(arm, hook_tip=True)
        self.cep_list.append(cep.A1.tolist())
        # ee, _ = self.robot.get_ee_jtt(arm)
        ee, _ = self.robot.end_effector_pos(arm)
        self.ee_list.append(ee.A1.tolist())
        if self.started_pulling_on_handle == False:
            if f[0,0] > 10.:
                self.started_pulling_on_handle_count += 1
            else:
                self.started_pulling_on_handle_count = 0
                self.init_log() # reset logs until started pulling on the handle.
                self.init_tangent_vector = None
            if self.started_pulling_on_handle_count > 1:
                self.started_pulling_on_handle = True
        return ''

    ## ROS callback. Stop and maintain position.
    def stop_cb(self, cmd):
        self.stopping_string = 'stop_cb called.'

    def common_stopping_conditions(self):
        """Return '' to keep going, or a human-readable reason to stop."""
        stop = ''
        # right arm only.
        wrist_force = self.robot.get_wrist_force(0, base_frame=True)
        mag = np.linalg.norm(wrist_force)
        if mag > self.eq_force_threshold:
            stop = 'force exceed'
        # A sustained (or sudden) force drop after the handle has moved
        # indicates the hook slipped off.
        if mag < 1.2 and self.hooked_location_moved:
            if (self.prev_force_mag - mag) > 30.:
                stop = 'slip: force step decrease and below thresold.'
            else:
                self.slip_count += 1
        else:
            self.slip_count = 0
        if self.slip_count == 10:
            stop = 'slip: force below threshold for too long.'
        return stop

    def mechanism_kinematics_rot_cb(self, mk):
        """ROS callback: cache the latest estimated circle (center, radius)."""
        self.fit_circle_lock.acquire()
        self.cx_start = mk.cx
        self.cy_start = mk.cy
        self.cz_start = mk.cz
        self.rad = mk.rad
        self.fit_circle_lock.release()

    ## constantly update the estimate of the kinematics and move the
    # equilibrium point along the tangent of the estimated arc, and
    # try to keep the radial force constant.
    # @param h_force_possible - True (hook side) or False (hook up).
    # @param v_force_possible - False (hook side) or True (hook up).
    # Is maintaining a radial force possible or not (based on hook
    # geometry and orientation)
    # @param cep_vel - tangential velocity of the cep in m/s
    def cep_gen_control_radial_force(self, arm, cep, cep_vel):
        self.log_state(arm)
        if self.started_pulling_on_handle == False:
            cep_vel = 0.02
        #step_size = 0.01 * cep_vel
        step_size = 0.1 * cep_vel # 0.1 is the time interval between calls to the equi_generator function (see pull)
        stop = self.common_stopping_conditions()
        wrist_force = self.robot.get_wrist_force(arm, base_frame=True)
        mag = np.linalg.norm(wrist_force)
        curr_pos, _ = self.robot.get_ee_jtt(arm)
        if len(self.ee_list)>1:
            start_pos = np.matrix(self.ee_list[0]).T
        else:
            start_pos = curr_pos
        #mechanism kinematics.
        if self.started_pulling_on_handle:
            self.mech_traj_pub.publish(Point32(curr_pos[0,0],
                                       curr_pos[1,0], curr_pos[2,0]))
        self.fit_circle_lock.acquire()
        rad = self.rad
        cx_start, cy_start = self.cx_start, self.cy_start
        cz_start = self.cz_start
        self.fit_circle_lock.release()
        cx, cy = cx_start, cy_start
        cz = cz_start
        print 'cx, cy, r:', cx, cy, rad
        # Unit vector from the estimated hinge axis toward the hook.
        radial_vec = curr_pos - np.matrix([cx,cy,cz]).T
        radial_vec = radial_vec/np.linalg.norm(radial_vec)
        # Tangent direction depends on which side the hinge is on.
        if cy_start < start_pos[1,0]:
            tan_x,tan_y = -radial_vec[1,0],radial_vec[0,0]
        else:
            tan_x,tan_y = radial_vec[1,0],-radial_vec[0,0]
        if tan_x > 0. and (start_pos[0,0]-curr_pos[0,0]) < 0.09:
            tan_x = -tan_x
            tan_y = -tan_y
        if cy_start > start_pos[1,0]:
            radial_vec = -radial_vec # axis to the left, want force in
                                     # anti-radial direction.
        rv = radial_vec
        force_vec = np.matrix([rv[0,0], rv[1,0], 0.]).T
        tangential_vec = np.matrix([tan_x, tan_y, 0.]).T
        tangential_vec_ts = tangential_vec
        radial_vec_ts = radial_vec
        force_vec_ts = force_vec
        if arm == 'right_arm' or arm == 0:
            if force_vec_ts[1,0] < 0.: # only allowing force to the left
                force_vec_ts = -force_vec_ts
        else:
            if force_vec_ts[1,0] > 0.: # only allowing force to the right
                force_vec_ts = -force_vec_ts
        f_vec = -1*np.array([wrist_force[0,0], wrist_force[1,0],
                             wrist_force[2,0]])
        f_rad_mag = np.dot(f_vec, force_vec.A1)
        # Proportional regulation of the radial force about a 4 N setpoint;
        # gain is asymmetric (weaker when force is above the setpoint).
        err = f_rad_mag-4.
        if err>0.:
            kp = -0.1
        else:
            kp = -0.2
        radial_motion_mag = kp * err # radial_motion_mag in cm (depends on eq_motion step size)
        radial_motion_vec = force_vec * radial_motion_mag
        print 'tangential_vec:', tangential_vec.A1
        eq_motion_vec = copy.copy(tangential_vec)
        eq_motion_vec += radial_motion_vec
        self.prev_force_mag = mag
        if self.init_tangent_vector == None or self.started_pulling_on_handle == False:
            self.init_tangent_vector = copy.copy(tangential_vec_ts)
        # Opening angle = angle between current and initial tangents.
        c = np.dot(tangential_vec_ts.A1, self.init_tangent_vector.A1)
        ang = np.arccos(c)
        if np.isnan(ang):
            ang = 0.
        tangential_vec = tangential_vec / np.linalg.norm(tangential_vec) # paranoia abot vectors not being unit vectors.
        dist_moved = np.dot((curr_pos - start_pos).A1, tangential_vec_ts.A1)
        ftan = abs(np.dot(wrist_force.A1, tangential_vec.A1))
        self.ft.tangential_force.append(ftan)
        self.ft.radial_force.append(f_rad_mag)
        if self.ft.type == 'rotary':
            self.ft.configuration.append(ang)
        else: # drawer
            print 'dist_moved:', dist_moved
            self.ft.configuration.append(dist_moved)
        if self.started_pulling_on_handle:
            self.force_traj_pub.publish(self.ft)
        # Alternative, tangential-force-based trigger for
        # started_pulling_on_handle; kept for reference.
        # if self.started_pulling_on_handle == False:
        #     ftan_pull_test = -np.dot(wrist_force.A1, tangential_vec.A1)
        #     print 'ftan_pull_test:', ftan_pull_test
        #     if ftan_pull_test > 5.:
        #         self.started_pulling_on_handle_count += 1
        #     else:
        #         self.started_pulling_on_handle_count = 0
        #         self.init_log() # reset logs until started pulling on the handle.
        #         self.init_tangent_vector = None
        #
        #     if self.started_pulling_on_handle_count > 1:
        #         self.started_pulling_on_handle = True
        if abs(dist_moved) > 0.09 and self.hooked_location_moved == False:
            # change the force threshold once the hook has started pulling.
            self.hooked_location_moved = True
            self.eq_force_threshold = ut.bound(mag+30.,20.,80.)
            self.ftan_threshold = 1.2 * self.ftan_threshold + 20.
        if self.hooked_location_moved:
            if abs(tangential_vec_ts[2,0]) < 0.2 and ftan > self.ftan_threshold:
                stop = 'ftan threshold exceed: %f'%ftan
        else:
            self.ftan_threshold = max(self.ftan_threshold, ftan)
        if self.hooked_location_moved and ang > math.radians(85.):
            print 'Angle:', math.degrees(ang)
            self.open_ang_exceed_count += 1
            if self.open_ang_exceed_count > 2:
                stop = 'opened mechanism through large angle: %.1f'%(math.degrees(ang))
        else:
            self.open_ang_exceed_count = 0
        # Step the equilibrium point; cep is mutated in place so the
        # caller sees the updated value.
        cep_t = cep + eq_motion_vec * step_size
        cep[0,0] = cep_t[0,0]
        cep[1,0] = cep_t[1,0]
        cep[2,0] = cep_t[2,0]
        print 'CEP:', cep.A1
        stop = stop + self.stopping_string
        return stop, (cep, None)

    def pull(self, arm, force_threshold, cep_vel, mechanism_type=''):
        """Open the mechanism: reset state/logs, run the EPC motion with
        cep_gen_control_radial_force, then pickle the trial logs.
        """
        self.mechanism_type = mechanism_type
        self.stopping_string = ''
        self.eq_pt_not_moving_counter = 0
        self.init_log()
        self.init_tangent_vector = None
        self.open_ang_exceed_count = 0.
        self.eq_force_threshold = force_threshold
        self.ftan_threshold = 2.
        self.hooked_location_moved = False # flag to indicate when the hooking location started moving.
        self.prev_force_mag = np.linalg.norm(self.robot.get_wrist_force(arm))
        self.slip_count = 0
        self.started_pulling_on_handle = False
        self.started_pulling_on_handle_count = 0
        ee_pos, _ = self.robot.get_ee_jtt(arm)
        # Initial kinematics guess: large-radius circle below the hook;
        # overwritten as mechanism_kinematics_rot messages arrive.
        self.cx_start = ee_pos[0,0]
        self.rad = 10.0
        self.cy_start = ee_pos[1,0]-self.rad
        self.cz_start = ee_pos[2,0]
        cep, _ = self.robot.get_cep_jtt(arm)
        arg_list = [arm, cep, cep_vel]
        result, _ = self.epc_motion(self.cep_gen_control_radial_force,
                                    0.1, arm, arg_list, self.log_state,
                                    #0.01, arm, arg_list,
                                    control_function = self.robot.set_cep_jtt)
        print 'EPC motion result:', result
        print 'Original force threshold:', force_threshold
        print 'Adapted force threshold:', self.eq_force_threshold
        print 'Adapted ftan threshold:', self.ftan_threshold
        d = {
            'f_list': self.f_list, 'ee_list': self.ee_list,
            'cep_list': self.cep_list, 'ftan_list': self.ft.tangential_force,
            'config_list': self.ft.configuration, 'frad_list': self.ft.radial_force,
            'f_list_ati': self.f_list_ati,
            'f_list_estimate': self.f_list_estimate,
            'f_list_torques': self.f_list_torques
        }
        ut.save_pickle(d,'pr2_pull_'+ut.formatted_time()+'.pkl')

    def search_and_hook(self, arm, hook_loc, hooking_force_threshold = 5.,
                        hit_threshold=15., hit_motions = 1,
                        hook_direction = 'left'):
        """Move behind hook_loc, push forward until contact, then slide
        along the surface until the hook catches (surface-follow EPC).

        hook_direction is 'left' (hook side) or 'up' (hook up).
        """
        # this needs to be debugged. Hardcoded for now.
        #if arm == 'right_arm' or arm == 0:
        #    hook_dir = np.matrix([0., 1., 0.]).T # hook direc in home position
        #    offset = -0.03
        #elif arm == 'left_arm' or arm == 1:
        #    hook_dir = np.matrix([0., -1., 0.]).T # hook direc in home position
        #    offset = -0.03
        #else:
        #    raise RuntimeError('Unknown arm: %s', arm)
        #start_loc = hook_loc + rot_mat.T * hook_dir * offset
        if hook_direction == 'left':
            #offset = np.matrix([0., -0.03, 0.]).T
            offset = np.matrix([0., -0.0, 0.]).T
            move_dir = np.matrix([0., 1., 0.]).T
        elif hook_direction == 'up':
            #offset = np.matrix([0., 0., -0.03]).T
            offset = np.matrix([0., 0., -0.0]).T
            move_dir = np.matrix([0., 0., 1.]).T
        start_loc = hook_loc + offset
        # vector normal to surface and pointing into the surface.
        normal_tl = np.matrix([1.0, 0., 0.]).T
        pt1 = start_loc - normal_tl * 0.1
        self.robot.go_cep_jtt(arm, pt1)
        # raw_input('Hit ENTER to go')
        vec = normal_tl * 0.2
        rospy.sleep(1.)
        for i in range(hit_motions):
            s = self.move_till_hit(arm, vec=vec, force_threshold=hit_threshold, speed=0.07)
        cep_start, _ = self.robot.get_cep_jtt(arm)
        cep = copy.copy(cep_start)
        arg_list = [arm, move_dir, hooking_force_threshold, cep, cep_start]
        print 'Hi there.'
        s = self.epc_motion(self.cep_gen_surface_follow, 0.1, arm,
                            arg_list, control_function = self.robot.set_cep_jtt)
        print 'result:', s
        return s
if __name__ == '__main__':
    import pr2_arms.pr2_arms as pa
    rospy.init_node('epc_pr2', anonymous = True)
    rospy.logout('epc_pr2: ready')
    pr2_arms = pa.PR2Arms(primary_ft_sensor='ati')
    #pr2_arms = pa.PR2Arms(primary_ft_sensor='estimate')
    door_epc = Door_EPC(pr2_arms)
    r_arm, l_arm = 0, 1
    arm = r_arm
    # Hook tool tip: 32 cm along the gripper x-axis.
    tip = np.matrix([0.32, 0., 0.]).T
    pr2_arms.arms.set_tooltip(arm, tip)
    print 'Put the hook within the PR2 gripper'
    raw_input('Hit ENTER to close')
    pr2_arms.close_gripper(arm, effort=30)
    raw_input('Turn off the motors and position the arm so that the hook is in an appropriate orientation')
    # for cabinets.
    #p1 = np.matrix([0.8, -0.40, -0.04]).T # pos 3
    #p1 = np.matrix([0.8, -0.10, -0.04]).T # pos 2
    p1 = np.matrix([0.85, -0.4, 0.1]).T # pos 1
    pr2_arms.go_cep_jtt(arm, p1)
    print 'Move the base so that hook is close to the handle'
    raw_input('Hit ENTER to start Door Opening')
    pr2_arms.close_gripper(arm, effort=80)
    door_epc.search_and_hook(arm, p1, hook_direction='left')
    door_epc.pull(arm, force_threshold=40., cep_vel=0.05)
    # # hrl toolchest drawer.
    # p1 = np.matrix([0.8, -0.2, -0.17]).T
    # door_epc.search_and_hook(arm, p1, hook_direction='up')
    # door_epc.pull(arm, force_threshold=40., cep_vel=0.05)
| [
[
1,
0,
0.0714,
0.0024,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0738,
0.0024,
0,
0.66,
0.0714,
739,
0,
1,
0,
0,
739,
0,
0
],
[
1,
0,
0.0762,
0.0024,
0,
... | [
"import numpy as np, math",
"import copy",
"from threading import RLock",
"import roslib; roslib.load_manifest('epc_door_opening')",
"import roslib; roslib.load_manifest('epc_door_opening')",
"import rospy",
"from epc_door_opening.msg import MechanismKinematicsRot",
"from epc_door_opening.msg import M... |
#!/usr/bin/python
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import numpy as np, math
import copy
from threading import RLock
import roslib; roslib.load_manifest('hrl_pr2_door_opening')
import rospy
from hrl_msgs.msg import FloatArray
from geometry_msgs.msg import Twist
def ft_cb(data):
    """ROS callback: copy a geometry_msgs/Twist wrench into ft_val.

    Layout of ft_val: [fx, fy, fz, tx, ty, tz] (linear part first,
    then angular). Guarded by the module-level lock.
    """
    with lock:
        ft_val[0:6] = [data.linear.x, data.linear.y, data.linear.z,
                       data.angular.x, data.angular.y, data.angular.z]
if __name__ == '__main__':
import sys
rospy.init_node('ati_ft_emulator')
print sys.argv
# 4 for roslaunch
if len(sys.argv) != 2 and len(sys.argv) != 4:
rospy.logerr('Need to pass the topic name on the command line. Exiting...')
sys.exit()
topic = sys.argv[1]
lock = RLock()
ft_val = [0.] * 6
pub = rospy.Subscriber('/r_cart/state/wrench', Twist, ft_cb)
pub = rospy.Publisher(topic, FloatArray)
rospy.loginfo('Started the ATI FT emulator.')
rt = rospy.Rate(100)
while not rospy.is_shutdown():
lock.acquire()
send_ft_val = copy.copy(ft_val)
lock.release()
fa = FloatArray(rospy.Header(stamp=rospy.Time.now()), send_ft_val)
pub.publish(fa)
rt.sleep()
| [
[
1,
0,
0.3924,
0.0127,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.4051,
0.0127,
0,
0.66,
0.1111,
739,
0,
1,
0,
0,
739,
0,
0
],
[
1,
0,
0.4177,
0.0127,
0,
... | [
"import numpy as np, math",
"import copy",
"from threading import RLock",
"import roslib; roslib.load_manifest('hrl_pr2_door_opening')",
"import roslib; roslib.load_manifest('hrl_pr2_door_opening')",
"import rospy",
"from hrl_msgs.msg import FloatArray",
"from geometry_msgs.msg import Twist",
"def f... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import math, numpy as np
import m3.rt_proxy as m3p
import m3.component_factory as m3f
import m3.toolbox as m3t
import m3.arm
# m3 joint-controller control-mode codes (magic numbers from the m3
# component API — confirm against the m3 documentation).
THETA_GC = 5   # presumably joint-angle control with gravity compensation — TODO confirm
THETA = 3      # presumably plain joint-angle control — TODO confirm
def safeop_things(proxy):
    """Put the robot, both arm chains, and their dynamatics components
    into safe-op mode via the given m3 realtime proxy.
    """
    components = ['m3humanoid_bimanual_mr1',                # robot
                  'm3arm_ma1', 'm3arm_ma2',                 # arm chains
                  'm3dynamatics_ma1', 'm3dynamatics_ma2']   # dynamatics
    for name in components:
        proxy.make_safe_operational(name)
# Bring up the m3 realtime proxy and the arm power component.
proxy = m3p.M3RtProxy()
proxy.start()
pwr_nm = 'm3pwr_pwr003'
pwr = m3f.create_component(pwr_nm)
proxy.publish_command(pwr)
joint_names = ['m3joint_ma1_j0', 'm3joint_ma1_j1', 'm3joint_ma1_j2', 'm3joint_ma1_j3', 'm3joint_ma1_j4', 'm3joint_ma1_j5', 'm3joint_ma1_j6']
comp_list = []
# Per-joint stiffness settings for the seven arm joints.
stiff_list = [0.2, 0.67, 1., 0.7, 0.75, 0.5, 0.5]
for i, c in enumerate(joint_names):
    comp = m3f.create_component(c)
    comp_list.append(comp)
    proxy.publish_command(comp)
    # First five joints use THETA_GC mode; the last two (wrist) use THETA.
    if i < 5:
        comp.set_control_mode(THETA_GC)
    else:
        comp.set_control_mode(THETA)
    comp.set_stiffness(stiff_list[i])
    comp.set_slew_rate_proportion(1.)
# safeop_things must be after make_operational_all.
proxy.make_operational_all()
safeop_things(proxy)
#ma1 = m3.arm.M3Arm('m3arm_ma1')
#proxy.subscribe_status(ma1)
proxy.step()
proxy.step()
raw_input('Hit ENTER to power on')
pwr.set_motor_power_on()
proxy.step()
proxy.step()
raw_input('Hit ENTER to move the joint')
# Target configuration (degrees): only joint j3 bent to 90.
q = [0., 0., 0., 90., 0., 0., 0.]
q = np.radians(q)
for i, c in enumerate(comp_list):
    c.set_theta_rad(q[i])
proxy.step()
proxy.step()
raw_input('Hit ENTER to stop')
proxy.stop()
#-------------- older code ---------------
##Force safe-op of robot, etc are present
#types=['m3humanoid','m3hand','m3gripper']
#for t in types:
# cc = proxy.get_available_components(t)
# for ccc in cc:
# print 'ccc:', ccc
# proxy.make_safe_operational(ccc)
#
#
##Force safe-op of chain so that gravity terms are computed
#chain=None
#if len(joint_names)>0:
# for j in joint_names:
# chain_name=m3t.get_joint_chain_name(j)
# print 'chain_name:', chain_name
# if chain_name!="":
# proxy.make_safe_operational(chain_name)
#
# #Force safe-op of chain so that gravity terms are computed
# dynamatics_name = m3t.get_chain_dynamatics_component_name(chain_name)
# print 'dynamatics_name:', dynamatics_name
# if dynamatics_name != "":
# proxy.make_safe_operational(dynamatics_name)
#
#
#
##Force safe-op of robot so that gravity terms are computed
#robot_name = m3t.get_robot_name()
#print 'robot_name:', robot_name
#if robot_name != "":
# proxy.make_safe_operational(robot_name)
| [
[
1,
0,
0.2109,
0.0068,
0,
0.66,
0,
526,
0,
2,
0,
0,
526,
0,
0
],
[
1,
0,
0.2177,
0.0068,
0,
0.66,
0.0312,
826,
0,
1,
0,
0,
826,
0,
0
],
[
1,
0,
0.2245,
0.0068,
0,
... | [
"import math, numpy as np",
"import m3.rt_proxy as m3p",
"import m3.component_factory as m3f",
"import m3.toolbox as m3t",
"import m3.arm",
"THETA_GC = 5",
"THETA = 3",
"def safeop_things(proxy):\n robot_name = 'm3humanoid_bimanual_mr1'\n chain_names = ['m3arm_ma1', 'm3arm_ma2']\n dynamatics_... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import numpy as np, math
import copy
import hrl_lib.transforms as tr
# dictionary for transforming different coordinate frames to global coord frame
# global is NOT the world or fixed frame, its just a convenient global frame
# (entries are filled in by create_globalTDict() below)
_globalT = {
    'torso' : None,
    'thok0' : None,
    'utm0' : None,
    'utmcam0': None,
    'mecanum': None
}
def create_globalTDict():
    """Populate _globalT by running each per-frame create function.

    Order matters: utm0 is derived from thok0, so thok0 must come first.
    """
    for make_transform in (createTorsoTransform, createThok0Transform,
                           createUtm0Transform, createMecanumTransform):
        make_transform()
def createTorsoTransform():
    """torso frame -> global frame: identity (the frames coincide)."""
    identity_rot = np.matrix(np.eye(3))
    zero_disp = np.matrix([0., 0., 0.]).T
    _globalT['torso'] = tr.composeHomogeneousTransform(identity_rot, zero_disp)
def createThok0Transform():
    """thok0 frame -> global frame: +9 cm along z, no rotation."""
    offset = np.matrix([0., 0., 0.09]).T
    _globalT['thok0'] = tr.composeHomogeneousTransform(np.matrix(np.eye(3)),
                                                       offset)
def createUtm0Transform():
    """utm0 frame -> global frame: thok0 translation plus 5.5 cm in z."""
    offset = copy.copy(tr.getDispSubMat(_globalT['thok0']))
    offset[2, 0] += 0.055
    _globalT['utm0'] = tr.composeHomogeneousTransform(np.matrix(np.eye(3)),
                                                      offset)
def createMecanumTransform():
    """mecanum frame -> global frame (ignores the zenither): -28 cm in x."""
    offset = np.matrix([-0.28, 0., 0.0]).T
    _globalT['mecanum'] = tr.composeHomogeneousTransform(np.matrix(np.eye(3)),
                                                         offset)
create_globalTDict()
def globalTmecanum(p, floating_vector=False):
    """Map a 3x1 vector from the mecanum frame to global.

    floating_vector=True treats p as a direction (no homogeneous
    normalization); False treats it as a position.
    """
    mapped = _globalT['mecanum'] * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def mecanumTglobal(p, floating_vector=False):
    """Map a 3x1 vector from global to the mecanum frame."""
    inv = tr.invertHomogeneousTransform(_globalT['mecanum'])
    mapped = inv * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def globalTtorso(p, floating_vector=False):
    """Map a 3x1 vector from the torso frame to global."""
    mapped = _globalT['torso'] * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def torsoTglobal(p, floating_vector=False):
    """Map a 3x1 vector from global to the torso frame."""
    inv = tr.invertHomogeneousTransform(_globalT['torso'])
    mapped = inv * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def globalTthok0(p, floating_vector=False):
    """Map a 3x1 vector from the thok0 frame to global."""
    mapped = _globalT['thok0'] * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def thok0Tglobal(p, floating_vector=False):
    """Map a 3x1 vector from global to the thok0 frame."""
    inv = tr.invertHomogeneousTransform(_globalT['thok0'])
    mapped = inv * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def globalTutm0(p, floating_vector=False):
    """Map a 3x1 vector from the utm0 frame to global."""
    mapped = _globalT['utm0'] * tr.xyzToHomogenous(p, floating_vector)
    if floating_vector:
        return mapped[0:3]
    return mapped[0:3] / mapped[3]
def utm0Tglobal(p,floating_vector=False):
    ''' Map a 3x1 vector from the global frame into the utm0 (laser) frame.
        floating_vector=True treats p as a free vector (rotation only).
    '''
    hom = tr.xyzToHomogenous(p, floating_vector)
    res = tr.invertHomogeneousTransform(_globalT['utm0']) * hom
    if floating_vector:
        return res[0:3]
    return res[0:3] / res[3]
## transformation matrix to go from global to utmcam0 coord frame.
# @param ang - servo angle (in RADIANS)
# @return 4x4 transformation matrix.
def utmcam0Tglobal_mat(ang):
    # global -> thok0 (pan-tilt base frame)
    thok0Tglobal_mat = tr.invertHomogeneousTransform(_globalT['thok0'])
    # servo angle.
    # rotate about y by the current servo angle (no translation)
    disp = np.matrix([0.,0.,0.]).T
    tmat = tr.composeHomogeneousTransform(tr.Ry(ang),disp)*thok0Tglobal_mat
    # cameraTlaser from thok_cam_calib.py
    # x,y,z: translation offsets; r1,r2,r3: fine-tuning rotations
    # (presumably meters and degrees from the calibration run -- TODO confirm)
    x = 0.012
    y = -0.056
    z = 0.035
    r1 = 0.
    r2 = 0.
    r3 = -0.7
    disp = np.matrix([-x,-y,-z]).T
    # base rotation aligning laser axes with camera axes, then the
    # small calibration corrections r1, r2, r3
    r = tr.Rz(math.radians(-90))*tr.Ry(math.radians(90.))
    disp = r*disp
    r = r*tr.Rx(math.radians(r1))
    r = r*tr.Ry(math.radians(r2))
    r = r*tr.Rz(math.radians(r3))
    t = tr.composeHomogeneousTransform(r, disp)
    # total: global -> thok0 -> servo-rotated laser -> camera
    tmat = t*tmat
    return tmat
## global to utmcam0 coord frame.
# @param p - 3xN np matrix.
# @param ang - servo angle (in RADIANS)
# @param floating_vector - interpretation of p. False -> position vector. True -> floating vector (rotation only).
# @return 3xN np matrix in the new coord frame.
def utmcam0Tglobal(p,ang,floating_vector=False):
    ''' Map a 3xN matrix from the global frame into the utmcam0 frame.
        ang is the servo angle in radians; floating_vector=True treats p
        as free vectors (rotation only).
    '''
    hom = tr.xyzToHomogenous(p, floating_vector)
    res = utmcam0Tglobal_mat(ang) * hom
    if floating_vector:
        return res[0:3]
    return res[0:3] / res[3]
## utmcam0 coord frame to global
# @param p - 3xN np matrix.
# @param ang - servo angle (in RADIANS)
# @param floating_vector - interpretation of p. False -> position vector. True -> floating vector (rotation only).
# @return 3xN np matrix in the new coord frame.
def globalTutmcam0(p,ang,floating_vector=False):
    ''' Map a 3xN matrix from the utmcam0 frame into the global frame.
        ang is the servo angle in radians; floating_vector=True treats p
        as free vectors (rotation only).
    '''
    mat = tr.invertHomogeneousTransform(utmcam0Tglobal_mat(ang))
    hom = tr.xyzToHomogenous(p, floating_vector)
    res = mat * hom
    if floating_vector:
        return res[0:3]
    return res[0:3] / res[3]
| [
[
1,
0,
0.1354,
0.0044,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.1397,
0.0044,
0,
0.66,
0.05,
739,
0,
1,
0,
0,
739,
0,
0
],
[
1,
0,
0.1441,
0.0044,
0,
0.... | [
"import numpy as np, math",
"import copy",
"import hrl_lib.transforms as tr",
"_globalT = {\n 'torso' : None,\n 'thok0' : None,\n 'utm0' : None,\n 'utmcam0': None,\n 'mecanum': None\n}",
"def create_globalTDict():\n \"\"\" call the create functions for all the coord frames\n \"\"\"\n ... |
#!/usr/bin/python
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import m3.rt_proxy as m3p
import m3.arm
import m3.toolbox as m3t
import m3.pwr as m3w
import m3.loadx6
import m3.component_factory as m3f
import arm_client as ac
import math
import numpy as np
import sys, time, os
import copy
from threading import RLock
import arms as ar
import roslib; roslib.load_manifest('epc_core')
import rospy
import hrl_lib.transforms as tr
from hrl_msgs.msg import FloatArray
from roslib.msg import Header
from std_msgs.msg import Bool
from std_msgs.msg import Empty
from std_srvs.srv import Empty as Empty_srv
from std_srvs.srv import EmptyResponse
from sensor_msgs.msg import JointState
THETA_GC = 5
TORQUE_GC = 4
THETA = 3
OFF = 0 # in OFF mode also, the behavior is strange. Not the same as hitting the estop (Advait, Jan 1 2010)
## 1D kalman filter update.
def kalman_update(xhat, P, Q, R, z):
    """One predict/correct cycle of a scalar (1D) Kalman filter.

    xhat, P -- prior state estimate and its variance
    Q, R    -- process and measurement noise variances
    z       -- new measurement
    Returns the updated (xhat, P).
    """
    # predict: state model is identity, variance grows by process noise
    prior_var = P + Q
    # correct: blend the measurement in proportion to the Kalman gain
    gain = prior_var / (prior_var + R)
    new_xhat = xhat + gain * (z - xhat)
    new_P = (1 - gain) * prior_var
    return new_xhat, new_P
class MekaArmSettings():
    """Per-arm controller configuration: joint stiffness scales and
    control mode for a Meka arm."""
    def __init__(self, stiffness_list=None,
                 control_mode='theta_gc'):
        ''' stiffness_list: list of 5 stiffness values for joints 0-4.
                            defaults to [0.7, 0.7, 0.8, 0.8, 0.3].
            control_mode: 'theta_gc' or 'torque_gc'
        '''
        # BUG FIX: the default used to be a mutable list literal shared
        # across all instances (and mutated below) -- use a None sentinel
        # and build a fresh list per call instead.
        if stiffness_list is None:
            stiffness_list = [0.7, 0.7, 0.8, 0.8, 0.3]
        self.set_stiffness_scale(stiffness_list)
        self.control_mode = control_mode

    def set_stiffness_scale(self, l):
        """Store a copy of l with the wrist-roll stiffness clamped."""
        # copy so the caller's list (or the shared default) is never
        # mutated as a side effect
        l = list(l)
        # for safety of wrist roll. Advait Jun 18, 2010.
        # changed to 0.2 from 0.3 (Advait, Sept 19, 2010)
        l[4] = min(l[4], 0.2)
        self.stiffness_list = l
class MekaArmServer():
    """ROS node wrapping the Meka M3 realtime system for both arms.

    Responsibilities:
      - push joint equilibrium point (JEP) and impedance-scale commands
        received over ROS topics down to the M3 joint components,
      - publish joint angles, JEPs, impedance scales and wrist
        force vectors (raw and 1D-Kalman-filtered) for both arms,
      - optional 'floating arms' mode where the JEP tracks manual motion.
    """
    def __init__(self, right_arm_settings=None, left_arm_settings=None):
        """right_arm_settings / left_arm_settings: MekaArmSettings or None.
        None leaves that arm's joint controllers in OFF mode."""
        self.arm_settings = {} # dict is set in set_arm_settings
        self.initialize_joints(right_arm_settings, left_arm_settings)
        #self.initialize_gripper()
        self.left_arm_ft = {'force': np.matrix(np.zeros((3,1),dtype='float32')),
                            'torque': np.matrix(np.zeros((3,1),dtype='float32'))}
        self.right_arm_ft = {'force': np.matrix(np.zeros((3,1),dtype='float32')),
                             'torque': np.matrix(np.zeros((3,1),dtype='float32'))}
        self.fts_bias = {'left_arm': self.left_arm_ft, 'right_arm': self.right_arm_ft}
        # kalman filtering force vector. (self.step and bias_wrist_ft)
        # Q: per-axis process noise, R: per-axis measurement noise,
        # xhat/P: filter state (estimate and variance) per axis.
        self.Q_force, self.R_force = {}, {}
        self.xhat_force, self.P_force = {}, {}
        self.Q_force['right_arm'] = [1e-3, 1e-3, 1e-3]
        self.R_force['right_arm'] = [0.2**2, 0.2**2, 0.2**2]
        self.xhat_force['right_arm'] = [0., 0., 0.]
        self.P_force['right_arm'] = [1.0, 1.0, 1.0]
        self.Q_force['left_arm'] = [1e-3, 1e-3, 1e-3]
        self.R_force['left_arm'] = [0.2**2, 0.2**2, 0.2**2]
        self.xhat_force['left_arm'] = [0., 0., 0.]
        self.P_force['left_arm'] = [1.0, 1.0, 1.0]
        #----- ROS interface ---------
        rospy.init_node('arm_server', anonymous=False)
        rospy.Service('toggle_floating_arms', Empty_srv, self.floating_arms_cb)
        self.q_r_pub = rospy.Publisher('/r_arm/q', FloatArray)
        self.q_l_pub = rospy.Publisher('/l_arm/q', FloatArray)
        self.force_raw_r_pub = rospy.Publisher('/r_arm/force_raw', FloatArray)
        self.force_raw_l_pub = rospy.Publisher('/l_arm/force_raw', FloatArray)
        self.force_r_pub = rospy.Publisher('/r_arm/force', FloatArray)
        self.force_l_pub = rospy.Publisher('/l_arm/force', FloatArray)
        self.jep_r_pub = rospy.Publisher('/r_arm/jep', FloatArray)
        self.jep_l_pub = rospy.Publisher('/l_arm/jep', FloatArray)
        self.alph_r_pub = rospy.Publisher('/r_arm/joint_impedance_scale', FloatArray)
        self.alph_l_pub = rospy.Publisher('/l_arm/joint_impedance_scale', FloatArray)
        self.pwr_state_pub = rospy.Publisher('/arms/pwr_state', Bool)
        self.joint_state_pub = rospy.Publisher('/joint_states', JointState)
        rospy.Subscriber('/r_arm/command/jep', FloatArray, self.r_jep_cb)
        rospy.Subscriber('/l_arm/command/jep', FloatArray, self.l_jep_cb)
        rospy.Subscriber('/r_arm/command/joint_impedance_scale', FloatArray, self.r_alpha_cb)
        rospy.Subscriber('/l_arm/command/joint_impedance_scale', FloatArray, self.l_alpha_cb)
        # publishing to this message will stop the arms but also crash
        # the server (since meka server crashes.) Advait Nov 14, 2010
        rospy.Subscriber('/arms/stop', Empty, self.stop)
        rospy.Subscriber('/arms/command/motors_off', Empty,
                         self.motors_off)
        # guards r_jep/l_jep and stiffness settings shared with ROS callbacks
        self.cb_lock = RLock()
        self.r_jep = None # see set_jep
        self.l_jep = None # see set_jep
        self.qr_prev = None # see step_ros
        self.ql_prev = None # see step_ros
        self.joint_names_list = ac.get_joint_name_dict()
        self.floating_arms = False
        self.floating_arms_counter = 0
    def floating_arms_cb(self, req):
        """ROS service callback: toggle 'floating arms' mode and reset
        its warm-up counter."""
        self.floating_arms_counter = 0
        self.floating_arms = not self.floating_arms
        #rospy.logout('floating_arms_cb called')
        return EmptyResponse()
    def set_arm_settings(self,right_arm_settings,left_arm_settings):
        """Apply control mode and stiffness settings to both arms.

        A None settings object puts that arm's joint controllers in OFF
        mode (see note below about OFF mode being unreliable)."""
        self.arm_settings['right_arm'] = right_arm_settings
        self.arm_settings['left_arm'] = left_arm_settings
        for arm,arm_settings in zip(['right_arm','left_arm'],[right_arm_settings,left_arm_settings]):
            joint_component_list = self.joint_list_dict[arm]
            # OFF mode doesn't seem to work. (Advait, Jan 1 2010)
            if arm_settings == None:
                for c in joint_component_list:
                    c.set_control_mode(OFF)
                continue
            stiffness_list = arm_settings.stiffness_list
            if arm_settings.control_mode == 'torque_gc':
                print 'setting control mode to torque_gc'
                for c in joint_component_list:
                    c.set_control_mode(TORQUE_GC)
                    c.set_torque_mNm(0.0)
            elif arm_settings.control_mode == 'theta_gc':
                print 'setting control mode to theta_gc'
                # joints 0-4: gravity-compensated position control;
                # wrist joints 5-6: plain position (THETA) control.
                for i in range(5):
                    joint_component_list[i].set_control_mode(THETA_GC)
                    joint_component_list[i].set_stiffness(stiffness_list[i])
                    joint_component_list[i].set_slew_rate_proportion(1.)
                joint_component_list[5].set_control_mode(THETA)
                joint_component_list[5].set_slew_rate_proportion(1.)
                joint_component_list[6].set_control_mode(THETA)
                joint_component_list[6].set_slew_rate_proportion(1.)
            elif arm_settings.control_mode == 'wrist_theta_gc':
                print 'setting control mode to theta_gc include wrist joints'
                # NOTE(review): this branch indexes stiffness_list[5..6],
                # so it needs a 7-element list (MekaArmSettings' default
                # has only 5) -- confirm callers supply 7 entries.
                for i in range(7):
                    joint_component_list[i].set_control_mode(THETA_GC)
                    joint_component_list[i].set_stiffness(stiffness_list[i])
                    joint_component_list[i].set_slew_rate_proportion(1.)
            else:
                print 'hrl_robot.initialize_joints. unknown control mode for ', arm,':', arm_settings.control_mode
    # put a few things into safeop so that individual joint
    # components work.
    def safeop_things(self):
        robot_name = 'm3humanoid_bimanual_mr1'
        chain_names = ['m3arm_ma1', 'm3arm_ma2']
        dynamatics_nms = ['m3dynamatics_ma1', 'm3dynamatics_ma2']
        self.proxy.make_safe_operational(robot_name)
        for c in chain_names:
            self.proxy.make_safe_operational(c)
        for d in dynamatics_nms:
            self.proxy.make_safe_operational(d)
    def initialize_joints(self, right_arm_settings, left_arm_settings):
        """Connect to the M3 realtime proxy and set up joint, force-torque,
        power and arm components for both arms.

        Raises m3t.M3Exception if any required component is unavailable."""
        self.proxy = m3p.M3RtProxy()
        self.proxy.start()
        for c in ['m3pwr_pwr003','m3loadx6_ma1_l0','m3arm_ma1','m3loadx6_ma2_l0','m3arm_ma2']:
            if not self.proxy.is_component_available(c):
                raise m3t.M3Exception('Component '+c+' is not available.')
        self.joint_list_dict = {}
        right_l = []
        for c in ['m3joint_ma1_j0','m3joint_ma1_j1','m3joint_ma1_j2',
                  'm3joint_ma1_j3','m3joint_ma1_j4','m3joint_ma1_j5',
                  'm3joint_ma1_j6']:
            if not self.proxy.is_component_available(c):
                raise m3t.M3Exception('Component '+c+' is not available.')
            right_l.append(m3f.create_component(c))
        self.joint_list_dict['right_arm'] = right_l
        left_l = []
        for c in ['m3joint_ma2_j0','m3joint_ma2_j1','m3joint_ma2_j2',
                  'm3joint_ma2_j3','m3joint_ma2_j4','m3joint_ma2_j5',
                  'm3joint_ma2_j6']:
            if not self.proxy.is_component_available(c):
                raise m3t.M3Exception('Component '+c+' is not available.')
            left_l.append(m3f.create_component(c))
        self.joint_list_dict['left_arm'] = left_l
        # only subscribe/publish joints of arms that have settings
        for arm,arm_settings in zip(['right_arm','left_arm'],[right_arm_settings,left_arm_settings]):
            if arm_settings == None:
                continue
            for comp in self.joint_list_dict[arm]:
                self.proxy.subscribe_status(comp)
                self.proxy.publish_command(comp)
        self.set_arm_settings(right_arm_settings,left_arm_settings)
        right_fts=m3.loadx6.M3LoadX6('m3loadx6_ma1_l0')
        self.proxy.subscribe_status(right_fts)
        left_fts=m3.loadx6.M3LoadX6('m3loadx6_ma2_l0')
        self.proxy.subscribe_status(left_fts)
        self.fts = {'right_arm':right_fts,'left_arm':left_fts}
        #self.pwr=m3w.M3Pwr('m3pwr_pwr003')
        self.pwr=m3f.create_component('m3pwr_pwr003')
        self.proxy.subscribe_status(self.pwr)
        self.proxy.publish_command(self.pwr)
        self.arms = {}
        self.arms['right_arm']=m3.arm.M3Arm('m3arm_ma1')
        self.proxy.subscribe_status(self.arms['right_arm'])
        self.arms['left_arm']=m3.arm.M3Arm('m3arm_ma2')
        self.proxy.subscribe_status(self.arms['left_arm'])
        # NOTE(review): two proxy steps -- presumably so subscribed status
        # is valid before first use (see the two-step note above motors_off).
        self.proxy.step()
        self.proxy.step()
    def initialize_gripper(self):
        # NOTE(review): 'm3h' is not imported at the top of this file, so
        # calling this method would raise NameError; the call site in
        # __init__ is commented out.
        #self.right_gripper = m3h.M3Gripper('m3gripper_mg0')
        self.right_gripper = m3h.M3Gripper('m3gripper_mg1')
        self.proxy.publish_command(self.right_gripper)
        self.proxy.subscribe_status(self.right_gripper)
    def step(self):
        """One M3 proxy step plus Kalman filtering of both wrist force
        vectors (per-axis 1D filters; large jumps bypass the filter)."""
        self.proxy.step()
        for arm in ['left_arm', 'right_arm']:
            z = self.get_wrist_force(arm).A1 # Force vector
            #if arm == 'right_arm':
            #    z = self.get_wrist_force_nano().A1
            for i in range(3):
                xhat, p = kalman_update(self.xhat_force[arm][i],
                                        self.P_force[arm][i],
                                        self.Q_force[arm][i],
                                        self.R_force[arm][i], z[i])
                if abs(z[i] - self.xhat_force[arm][i]) > 3.:
                    xhat = z[i] # not filtering step changes.
                self.xhat_force[arm][i] = xhat
                self.P_force[arm][i] = p
    def step_ros(self):
        """One servo-loop iteration: apply the latest commands, step the
        M3 proxy, run floating-arms tracking, publish state over ROS."""
        r_arm = 'right_arm'
        l_arm = 'left_arm'
        # snapshot the command state written by the ROS callbacks
        self.cb_lock.acquire()
        r_jep = copy.copy(self.r_jep)
        l_jep = copy.copy(self.l_jep)
        r_alpha = copy.copy(self.arm_settings['right_arm'].stiffness_list)
        l_alpha = copy.copy(self.arm_settings['left_arm'].stiffness_list)
        self.cb_lock.release()
        self.set_jep(r_arm, r_jep)
        self.set_jep(l_arm, l_jep)
        self.set_alpha(r_arm, r_alpha)
        self.set_alpha(l_arm, l_alpha)
        self.step()
        motor_pwr_state = self.is_motor_power_on()
        # if power is off, latch the JEPs to the current pose so the arms
        # do not jump when power returns
        if not motor_pwr_state:
            self.maintain_configuration()
        q_r = self.get_joint_angles(r_arm)
        q_l = self.get_joint_angles(l_arm)
        if self.floating_arms:
            # warm-up: latch the reference pose for the first ~100 cycles
            if self.qr_prev == None or self.floating_arms_counter < 100:
                self.qr_prev = q_r
                self.ql_prev = q_l
                self.floating_arms_counter += 1
            else:
                # shift each JEP by any joint motion beyond a per-joint
                # deadband so the arm 'floats' when pushed by hand
                tol = np.radians([5., 2., 10., 2., 10., 0.03, 0.6])
                r_arr = np.array(q_r)
                l_arr = np.array(q_l)
                prev_r_arr = np.array(self.qr_prev)
                prev_l_arr = np.array(self.ql_prev)
                dqr = np.array(q_r) - np.array(prev_r_arr)
                dql = np.array(q_l) - np.array(prev_l_arr)
                dqr = dqr * (np.abs(dqr) > tol)
                dql = dql * (np.abs(dql) > tol)
                r_jep = (np.array(r_jep) + dqr).tolist()
                l_jep = (np.array(l_jep) + dql).tolist()
                self.cb_lock.acquire()
                self.r_jep = copy.copy(r_jep)
                self.l_jep = copy.copy(l_jep)
                self.cb_lock.release()
                # advance the reference pose only for joints that moved
                change_idxs = np.where(dqr != 0)
                prev_r_arr[change_idxs] = r_arr[change_idxs]
                change_idxs = np.where(dql != 0)
                prev_l_arr[change_idxs] = l_arr[change_idxs]
                self.qr_prev = prev_r_arr.tolist()
                self.ql_prev = prev_l_arr.tolist()
        f_raw_r = self.get_wrist_force(r_arm).A1.tolist()
        f_raw_l = self.get_wrist_force(l_arm).A1.tolist()
        f_r = self.xhat_force[r_arm]
        f_l = self.xhat_force[l_arm]
        # publish stuff over ROS.
        time_stamp = rospy.Time.now()
        h = Header()
        h.stamp = time_stamp
        self.q_r_pub.publish(FloatArray(h, q_r))
        self.q_l_pub.publish(FloatArray(h, q_l))
        self.jep_r_pub.publish(FloatArray(h, r_jep))
        self.jep_l_pub.publish(FloatArray(h, l_jep))
        self.alph_r_pub.publish(FloatArray(h, r_alpha))
        self.alph_l_pub.publish(FloatArray(h, l_alpha))
        h.frame_id = '/torso_lift_link'
        nms = self.joint_names_list['right_arm'] + self.joint_names_list['left_arm']
        pos = q_r + q_l
        # zero velocities/efforts -- only positions are reported
        self.joint_state_pub.publish(JointState(h, nms, pos,
                                    [0.]*len(pos), [0.]*len(pos)))
        h.frame_id = ar.link_tf_name(r_arm, 7)
        self.force_raw_r_pub.publish(FloatArray(h, f_raw_r))
        self.force_r_pub.publish(FloatArray(h, f_r))
        h.frame_id = ar.link_tf_name(l_arm, 7)
        self.force_raw_l_pub.publish(FloatArray(h, f_raw_l))
        self.force_l_pub.publish(FloatArray(h, f_l))
        self.pwr_state_pub.publish(Bool(motor_pwr_state))
    def is_motor_power_on(self):
        return self.pwr.is_motor_power_on(None)
    # Advait, Aug 8, 2009
    # two steps in motors_on and off because with simply one step
    # pwr.is_motor_on does not get the correct value. (I think this is
    # because at the clock edge when motor on command is sent, the power
    # is still off and thus the status is not affected.)
    def motors_off(self, msg=None):
        """Cut motor power (also usable as a ROS subscriber callback)."""
        self.pwr.set_motor_power_off()
    def motors_on(self):
        """Hold the current configuration, then switch motor power on."""
        self.maintain_configuration()
        self.pwr.set_motor_power_on()
        self.step()
        self.step()
    def maintain_configuration(self):
        """Set each active arm's JEP to its current joint angles so the
        arm holds its present configuration."""
        for arm in ['right_arm','left_arm']:
            q = self.get_joint_angles(arm)
            if self.arm_settings[arm] == None:
                continue
            if 'theta_gc' not in self.arm_settings[arm].control_mode:
                raise RuntimeError('bad control mode: %s', self.arm_settings[arm].control_mode)
            self.set_jep(arm, q)
            self.cb_lock.acquire()
            if arm == 'right_arm':
                self.r_jep = q
            else:
                self.l_jep = q
            self.cb_lock.release()
    def power_on(self):
        """Hold configuration, make all components operational and turn
        motor power on."""
        self.maintain_configuration()
        self.proxy.make_operational_all()
        self.safeop_things()
        self.pwr.set_motor_power_on()
        self.step()
        self.step()
    def stop(self, msg=None):
        """Cut motor power and stop the proxy (also a ROS callback; see
        the note in __init__ -- this crashes the server)."""
        self.pwr.set_motor_power_off()
        self.step()
        self.proxy.stop()
    ##3X1 numpy matrix of forces measured by the wrist FT sensor.
    #(This is the force that the environment is applying on the wrist)
    # @param arm - 'left_arm' or 'right_arm'
    # @return in SI units
    #coord frame - tool tip coord frame (parallel to the base frame in the home position)
    # 2010/2/5 Advait, Aaron King, Tiffany verified that coordinate frame
    # from Meka is the left-hand coordinate frame.
    def get_wrist_force(self, arm):
        m = []
        lc = self.fts[arm]
        # mN -> N
        m.append(lc.get_Fx_mN()/1000.)
        m.append(lc.get_Fy_mN()/1000.)
        m.append(-lc.get_Fz_mN()/1000.)
        # rotate into the tool-tip frame; the sign flips presumably convert
        # the sensor's left-hand frame (see note above) -- TODO confirm
        m = tr.Rz(math.radians(-30.0))*np.matrix(m).T
        m[1,0] = -m[1,0]
        m[2,0] = -m[2,0]
        return m
    def get_wrist_force_nano(self):
        # NOTE(review): self.r_arm_ftc is never assigned in this class;
        # this raises AttributeError unless something else attaches the
        # nano FT client. Only referenced from commented-out code in step().
        f = self.r_arm_ftc.read()[0:3, :]
        f = tr.Rz(math.radians(-60.)) * f
        f[1,0] = f[1,0] * -1
        return f
    #-------------------- getting and setting joint angles ------------
    ##
    # @param arm - 'left_arm' or 'right_arm'
    # @return list of 7 joint accelerations in RADIANS/s^2.
    # according to meka's coordinate frames.
    def get_joint_accelerations(self, arm):
        return self.arms[arm].get_thetadotdot_rad().tolist()
    ##
    # @param arm - 'left_arm' or 'right_arm'
    # @return list of 7 joint velocities in RADIANS/s.
    # according to meka's coordinate frames.
    def get_joint_velocities(self, arm):
        return self.arms[arm].get_thetadot_rad().tolist()
    def get_joint_angles(self, arm):
        ''' returns list of 7 joint angles in RADIANS.
            according to meka's coordinate frames.
        '''
        return self.arms[arm].get_theta_rad().tolist()
    def l_jep_cb(self, msg):
        """ROS callback: record the commanded left-arm JEP."""
        self.cb_lock.acquire()
        self.l_jep = msg.data
        self.cb_lock.release()
    def r_jep_cb(self, msg):
        """ROS callback: record the commanded right-arm JEP."""
        self.cb_lock.acquire()
        self.r_jep = msg.data
        self.cb_lock.release()
    ##
    # @param q - list of 7 joint angles in RADIANS. according to meka's coordinate frames.
    def set_jep(self, arm, q):
        # silently ignore commands for an arm that has no settings (OFF)
        if self.arm_settings[arm] == None:
            return
        if self.arm_settings[arm].control_mode != 'theta_gc' and \
           self.arm_settings[arm].control_mode != 'wrist_theta_gc':
            raise RuntimeError('Bad control mode: %s'%(self.arm_settings[arm].control_mode))
        for i,qi in enumerate(q):
            ## NOTE - meka's set_theta_deg takes angle in radians.
            #self.joint_list_dict[arm][i].set_theta_deg(qi)
            # Not anymore. (Advait Aug 27, 2009)
            self.joint_list_dict[arm][i].set_theta_rad(qi)
        self.cb_lock.acquire()
        if arm == 'right_arm':
            self.r_jep = q
        else:
            self.l_jep = q
        self.cb_lock.release()
    def r_alpha_cb(self, msg):
        """ROS callback: update the right-arm joint impedance scales."""
        self.cb_lock.acquire()
        self.arm_settings['right_arm'].set_stiffness_scale(list(msg.data))
        self.cb_lock.release()
    def l_alpha_cb(self, msg):
        """ROS callback: update the left-arm joint impedance scales."""
        self.cb_lock.acquire()
        self.arm_settings['left_arm'].set_stiffness_scale(list(msg.data))
        self.cb_lock.release()
    def set_alpha(self, arm, alpha):
        """Push per-joint stiffness scales down to the joint components."""
        jcl = self.joint_list_dict[arm]
        for i,a in enumerate(alpha):
            jcl[i].set_stiffness(a)
if __name__ == '__main__':
    try:
        # per-joint stiffness scales for joints 0-4 (the wrist-roll entry
        # is clamped to 0.2 inside MekaArmSettings)
        settings_r = MekaArmSettings(stiffness_list=[0.1939,0.6713,0.748,0.7272,0.75])
        #settings_r = None
        settings_l = MekaArmSettings(stiffness_list=[0.1939,0.6713,0.748,0.7272,0.75])
        #settings_l = None
        cody_arms = MekaArmServer(settings_r, settings_l)
        # print 'hit a key to power up the arms.'
        # k=m3t.get_keystroke()
        cody_arms.power_on()
        # servo loop: 5 ms sleep plus per-step work per iteration
        while not rospy.is_shutdown():
            cody_arms.step_ros()
            rospy.sleep(0.005)
        cody_arms.stop()
    except m3t.M3Exception:
        print '############################################################'
        print 'In all likelihood the Meka server is not running.'
        print '############################################################'
        raise
    except:
        # only use cody_arms if it was successfully created.
        # bare except is deliberate here: always cut motor power on the
        # way out, then re-raise whatever happened.
        if 'cody_arms' in locals():
            cody_arms.stop()
        raise
| [
[
1,
0,
0.0558,
0.0017,
0,
0.66,
0,
826,
0,
1,
0,
0,
826,
0,
0
],
[
1,
0,
0.0576,
0.0017,
0,
0.66,
0.0323,
234,
0,
1,
0,
0,
234,
0,
0
],
[
1,
0,
0.0593,
0.0017,
0,
... | [
"import m3.rt_proxy as m3p",
"import m3.arm",
"import m3.toolbox as m3t",
"import m3.pwr as m3w",
"import m3.loadx6",
"import m3.component_factory as m3f",
"import arm_client as ac",
"import math",
"import numpy as np",
"import sys, time, os",
"import copy",
"from threading import RLock",
"i... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import math, numpy as np
import arm_client as ac
import arms as ar
import roslib; roslib.load_manifest('epc_core')
import rospy
import tf.broadcaster as tfb
import hrl_lib.transforms as tr
from hrl_msgs.msg import FloatArray
from roslib.msg import Header
from visualization_msgs.msg import Marker
def publish_cartesian_markers(arm, time_stamp, cep, rot, c1, c2, marker_id):
    """Publish two arrow markers visualizing an end-effector frame.

    arm       - 'right_arm' or 'left_arm'; used as the marker namespace
                and to pick the second arrow's rotation sign.
    cep       - 3x1 position (np matrix) in the arm's base frame.
    rot       - 3x3 rotation matrix of the frame to display.
    c1, c2    - RGB tuples for the first and second arrow.
    marker_id - base id; the arrows get marker_id*100 + 0 and + 1.
    Publishes via the module-level marker_pub (set up in __main__).
    """
    marker = Marker()
    marker.header.frame_id = ar.link_tf_name(arm, 0)
    marker.header.stamp = time_stamp
    marker.ns = arm
    marker.type = Marker.ARROW
    marker.action = Marker.ADD
    marker.pose.position.x = cep[0,0]
    marker.pose.position.y = cep[1,0]
    marker.pose.position.z = cep[2,0]
    marker.scale.x = 0.1
    marker.scale.y = 0.2
    marker.scale.z = 0.2
    marker.lifetime = rospy.Duration()
    marker.id = marker_id*100 + 0
    #rot1 = tr.Ry(math.radians(90.)) * rot.T
    # orient the first arrow (presumably along the frame's approach
    # direction -- TODO confirm against rviz)
    rot1 = rot * tr.rotY(math.pi/2)
    quat = tr.matrix_to_quaternion(rot1)
    marker.pose.orientation.x = quat[0]
    marker.pose.orientation.y = quat[1]
    marker.pose.orientation.z = quat[2]
    marker.pose.orientation.w = quat[3]
    marker.color.r = c1[0]
    marker.color.g = c1[1]
    marker.color.b = c1[2]
    marker.color.a = 1.
    marker_pub.publish(marker)
    # NOTE: the same Marker object is reused for the second arrow --
    # only id, orientation and color change below.
    marker.id = marker_id*100 + 1
    if arm == 'left_arm':
        #rot2 = tr.Rz(math.radians(90.)) * rot.T
        rot2 = rot * tr.rotZ(-math.pi/2)
    else:
        #rot2 = tr.Rz(math.radians(-90.)) * rot.T
        rot2 = rot * tr.rotZ(math.pi/2)
    quat = tr.matrix_to_quaternion(rot2)
    marker.pose.orientation.x = quat[0]
    marker.pose.orientation.y = quat[1]
    marker.pose.orientation.z = quat[2]
    marker.pose.orientation.w = quat[3]
    marker.color.r = c2[0]
    marker.color.g = c2[1]
    marker.color.b = c2[2]
    marker.color.a = 1.
    marker_pub.publish(marker)
def publish_sphere_marker(color, size, frameid, time_stamp, ns,
                          marker_id):
    """Publish a sphere Marker of the given size and RGB color at the
    origin of frameid (identity orientation, infinite lifetime).
    Publishes via the module-level marker_pub."""
    m = Marker()
    m.header.frame_id = frameid
    m.header.stamp = time_stamp
    m.ns = ns
    m.id = marker_id
    m.type = Marker.SPHERE
    m.action = Marker.ADD
    m.lifetime = rospy.Duration()
    m.pose.position.x = 0.
    m.pose.position.y = 0.
    m.pose.position.z = 0.
    m.pose.orientation.x = 0
    m.pose.orientation.y = 0
    m.pose.orientation.z = 0
    m.pose.orientation.w = 1
    m.scale.x = size
    m.scale.y = size
    m.scale.z = size
    m.color.r = color[0]
    m.color.g = color[1]
    m.color.b = color[2]
    m.color.a = 1.
    marker_pub.publish(m)
if __name__ == '__main__':
    # Visualization node: publishes base-frame wrist forces, TF frames
    # for a few arm links, and rviz markers for the actual and
    # equilibrium end-effector poses of both arms.
    arms = ar.M3HrlRobot()
    arm_client = ac.MekaArmClient(arms)
    force_r_pub = rospy.Publisher('/r_arm/force_base', FloatArray)
    force_l_pub = rospy.Publisher('/l_arm/force_base', FloatArray)
    marker_pub = rospy.Publisher('/cody_arms/viz_marker', Marker)
    rospy.logout('Sleeping ...')
    rospy.sleep(1.0)
    rospy.logout('... begin')
    r_arm = 'right_arm'
    l_arm = 'left_arm'
    transform_bcast = tfb.TransformBroadcaster()
    torso_link_name = ar.link_tf_name(r_arm, 0)
    # ~10 Hz visualization loop
    while not rospy.is_shutdown():
        rospy.sleep(0.1)
        f_r = arm_client.get_wrist_force(r_arm, base_frame=True)
        f_l = arm_client.get_wrist_force(l_arm, base_frame=True)
        time_stamp = rospy.Time.now()
        h = Header()
        h.stamp = time_stamp
        force_r_pub.publish(FloatArray(h, f_r))
        force_l_pub.publish(FloatArray(h, f_l))
        # gray sphere at the torso link shared by both arms
        publish_sphere_marker((0.5,0.5,0.5), 0.08, torso_link_name,
                              time_stamp, 'both_arms', 0)
        for arm in [r_arm, l_arm]:
            q = arm_client.get_joint_angles(arm)
            # broadcast TF + a sphere for a subset of the links
            links = [2, 3, 7]
            for i in links:
                p, rot = arms.FK_all(arm, q, i)
                qaut = tr.matrix_to_quaternion(rot)
                frameid = ar.link_tf_name(arm, i)
                transform_bcast.sendTransform(p.A1.tolist(), qaut, time_stamp,
                                              frameid, torso_link_name)
                publish_sphere_marker((0.5,0.1,0.5), 0.05, frameid,
                                      time_stamp, arm, i)
            c1 = (0.5, 0.1, 0.5)
            c2 = (0.5, 0.5, 0.1)
            # actual end-effector pose from the measured joint angles
            p, rot = arms.FK_all(arm, q)
            publish_cartesian_markers(arm, time_stamp, p, rot, c1, c2,
                                      marker_id=76)
            c1 = (0.2, 0.2, 0.2)
            c2 = (0.6, 0.6, 0.6)
            # commanded equilibrium pose (JEP), clamped to joint limits
            jep = arm_client.get_jep(arm)
            jep = arms.clamp_to_physical_joint_limits(arm, jep)
            cep, rot = arms.FK_all(arm, jep)
            publish_cartesian_markers(arm, time_stamp, cep, rot, c1, c2,
                                      marker_id = 77)
| [
[
1,
0,
0.1694,
0.0055,
0,
0.66,
0,
526,
0,
2,
0,
0,
526,
0,
0
],
[
1,
0,
0.1803,
0.0055,
0,
0.66,
0.0769,
435,
0,
1,
0,
0,
435,
0,
0
],
[
1,
0,
0.1858,
0.0055,
0,
... | [
"import math, numpy as np",
"import arm_client as ac",
"import arms as ar",
"import roslib; roslib.load_manifest('epc_core')",
"import roslib; roslib.load_manifest('epc_core')",
"import rospy",
"import tf.broadcaster as tfb",
"import hrl_lib.transforms as tr",
"from hrl_msgs.msg import FloatArray",
... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import math
import numpy as np
import copy
import sys, time, os
import PyKDL as kdl
import create_IK_guess_dict as cgd
import roslib; roslib.load_manifest('epc_core')
import hrl_lib.transforms as tr
import hrl_lib.util as ut
import hrl_lib.kdl_utils as ku
#-------------- TF stuff ---------------
def link_tf_name(arm, link_number):
    """Return the TF frame name for a given arm link.

    Link 0 is the shared torso frame, link 7 the end effector; the
    remaining links are named '<r|l>_arm_<n>'.
    """
    prefix = 'r_arm' if arm == 'right_arm' else 'l_arm'
    if link_number == 0:
        return 'torso_link'
    if link_number == 7:
        return prefix + '_ee'
    return '%s_%s' % (prefix, link_number)
class M3HrlRobot():
    def __init__(self, end_effector_length):
        """Kinematic model (KDL chains, solvers, joint limits) of the arms.

        end_effector_length - tool length from the wrist (presumably in
        meters -- TODO confirm); the wrist linkage and FT sensor lengths
        are added below before building the chains.
        """
        # create joint limit dicts
        self.joint_lim_dict = {}
        self.joint_lim_dict['right_arm'] = {'max': np.radians([ 120.00, 122.15, 77.5, 144., 122., 45., 45.]),
                                            'min': np.radians([ -47.61, -20., -77.5, 0., -80., -45., -45.])}
        self.joint_lim_dict['left_arm'] = {'max': np.radians([ 120.00, 20., 77.5, 144., 80., 45., 45.]),
                                           'min': np.radians([ -47.61, -122.15, -77.5, 0., -122., -45., -45.])}
        end_effector_length += 0.0135 + 0.04318 # add wrist linkange and FT sensor lengths
        self.setup_kdl_mekabot(end_effector_length)
        # NOTE(review): hard-coded, user-specific checkout paths -- the IK
        # seed tables will fail to load on other machines/layouts.
        q_guess_pkl_l = os.environ['HOME']+'/svn/gt-ros-pkg/hrl/equilibrium_point_control/epc_core/src/cody_arms/q_guess_left_dict.pkl'
        q_guess_pkl_r = os.environ['HOME']+'/svn/gt-ros-pkg/hrl/equilibrium_point_control/epc_core/src/cody_arms/q_guess_right_dict.pkl'
        self.q_guess_dict_left = ut.load_pickle(q_guess_pkl_l)
        self.q_guess_dict_right = ut.load_pickle(q_guess_pkl_r)
# KDL joint array to meka joint list. (7->7)
# arm - 'left_arm' or 'right_arm'
def kdl_angles_to_meka(self, arm, q_jnt_arr):
if q_jnt_arr == None:
return None
q_rad = [0. for i in range(7)]
q_rad[0] = -q_jnt_arr[0]
q_rad[1] = -q_jnt_arr[1]
q_rad[2] = -q_jnt_arr[2]
q_rad[3] = -q_jnt_arr[3]
q_rad[4] = -q_jnt_arr[4]
q_rad[5] = -q_jnt_arr[5]
q_rad[6] = -q_jnt_arr[6]
return q_rad
# meka joint list to KDL joint array (7->7)
# arm - 'left_arm' or 'right_arm'
def meka_angles_to_kdl(self,arm,q_list):
if q_list == None:
return None
n_joints = len(q_list)
q = kdl.JntArray(n_joints)
q[0] = -q_list[0]
q[1] = -q_list[1]
q[2] = -q_list[2]
if n_joints > 3:
q[3] = -q_list[3]
if n_joints == 7:
q[4] = -q_list[4]
q[5] = -q_list[5]
q[6] = -q_list[6]
return q
    def create_right_chain(self, end_effector_length):
        """Build the 7-joint KDL chain for the right arm.

        Segment translations are fixed link offsets (presumably meters --
        TODO confirm); the final segment extends along -z by
        end_effector_length.
        """
        ch = kdl.Chain()
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY),kdl.Frame(kdl.Vector(0.,-0.18493,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX),kdl.Frame(kdl.Vector(0.,-0.03175,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotZ),kdl.Frame(kdl.Vector(0.00635,0.,-0.27795))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY),kdl.Frame(kdl.Vector(0.,0.,-0.27853))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotZ),kdl.Frame(kdl.Vector(0.,0.,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY),kdl.Frame(kdl.Vector(0.,0.,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX),kdl.Frame(kdl.Vector(0.,0.,-end_effector_length))))
        return ch
    def create_left_chain(self, end_effector_length):
        """Build the 7-joint KDL chain for the left arm.

        Mirror of create_right_chain: the first two segment offsets are
        flipped in y; the final segment extends along -z by
        end_effector_length.
        """
        ch = kdl.Chain()
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY),kdl.Frame(kdl.Vector(0.,0.18493,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX),kdl.Frame(kdl.Vector(0.,0.03175,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotZ),kdl.Frame(kdl.Vector(0.00635,0.,-0.27795))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY),kdl.Frame(kdl.Vector(0.,0.,-0.27853))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotZ),kdl.Frame(kdl.Vector(0.,0.,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY),kdl.Frame(kdl.Vector(0.,0.,0.))))
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX),kdl.Frame(kdl.Vector(0.,0.,-end_effector_length))))
        return ch
def create_solvers(self, ch):
fk = kdl.ChainFkSolverPos_recursive(ch)
ik_v = kdl.ChainIkSolverVel_pinv(ch)
ik_p = kdl.ChainIkSolverPos_NR(ch, fk, ik_v)
jac = kdl.ChainJntToJacSolver(ch)
return fk, ik_v, ik_p, jac
def setup_kdl_mekabot(self, end_effector_length):
#right arm
ch = self.create_right_chain(end_effector_length)
fk, ik_v, ik_p, jac = self.create_solvers(ch)
kdl_rightarm = {}
kdl_rightarm['chain'] = ch
kdl_rightarm['nJnts'] = ch.getNrOfJoints()
kdl_rightarm['fk_p'] = fk
kdl_rightarm['ik_v'] = ik_v
kdl_rightarm['ik_p'] = ik_p
kdl_rightarm['jacobian_solver'] = jac
#left arm
kdl_leftarm = {}
ch = self.create_left_chain(end_effector_length)
fk, ik_v, ik_p, jac = self.create_solvers(ch)
kdl_leftarm['chain'] = ch
kdl_leftarm['nJnts'] = ch.getNrOfJoints()
kdl_leftarm['fk_p'] = fk
kdl_leftarm['ik_v'] = ik_v
kdl_leftarm['ik_p'] = ik_p
kdl_leftarm['jacobian_solver'] = jac
#Add both chains to dictionary
self.cody_kdl = {'right_arm':kdl_rightarm,'left_arm':kdl_leftarm}
def FK_kdl(self, arm, q, link_number):
fk_solver = self.cody_kdl[arm]['fk_p']
endeffec_frame = kdl.Frame()
kinematics_status = fk_solver.JntToCart(q, endeffec_frame,
link_number)
if kinematics_status >= 0:
# print 'End effector transformation matrix:', endeffec_frame
return endeffec_frame
else:
print 'Could not compute forward kinematics.'
return None
def Jac_kdl(self,arm,q):
''' returns the Jacobian, given the joint angles
'''
J_kdl = kdl.Jacobian(7)
self.cody_kdl[arm]['jacobian_solver'].JntToJac(q,J_kdl)
kdl_jac = np.matrix([
[J_kdl[0,0],J_kdl[0,1],J_kdl[0,2],J_kdl[0,3],J_kdl[0,4],J_kdl[0,5],J_kdl[0,6]],
[J_kdl[1,0],J_kdl[1,1],J_kdl[1,2],J_kdl[1,3],J_kdl[1,4],J_kdl[1,5],J_kdl[1,6]],
[J_kdl[2,0],J_kdl[2,1],J_kdl[2,2],J_kdl[2,3],J_kdl[2,4],J_kdl[2,5],J_kdl[2,6]],
[J_kdl[3,0],J_kdl[3,1],J_kdl[3,2],J_kdl[3,3],J_kdl[3,4],J_kdl[3,5],J_kdl[3,6]],
[J_kdl[4,0],J_kdl[4,1],J_kdl[4,2],J_kdl[4,3],J_kdl[4,4],J_kdl[4,5],J_kdl[4,6]],
[J_kdl[5,0],J_kdl[5,1],J_kdl[5,2],J_kdl[5,3],J_kdl[5,4],J_kdl[5,5],J_kdl[5,6]],
])
return kdl_jac
def IK_kdl(self,arm,frame, q_init):
''' IK, returns jointArray (None if impossible)
frame - desired frame of the end effector
q_init - initial guess for the joint angles. (JntArray)
'''
nJnts = self.cody_kdl[arm]['nJnts']
ik_solver = self.cody_kdl[arm]['ik_p']
q = kdl.JntArray(nJnts)
if ik_solver.CartToJnt(q_init,frame,q)>=0:
for i in range(nJnts):
q[i] = tr.angle_within_mod180(q[i])
return q
else:
if arm == 'right_arm':
ik_solver = self.cody_kdl[arm]['ik_p_nolim']
if ik_solver.CartToJnt(q_init,frame,q)>=0:
for i in range(nJnts):
q[i] = tr.angle_within_mod180(q[i])
return q
print 'Error: could not calculate inverse kinematics'
return None
def FK_rot(self, arm, q, link_number = 7):
pos, rot = self.FK_all(arm, q, link_number)
return rot
# @param arm - 'left_arm' or 'right_arm'
# @param q - list of 7 joint angles (RADIANs)
# @param link_number - perform FK up to this link. (1-7)
# @return 3x1 numpy matrix
def FK(self, arm, q, link_number = 7):
pos, rot = self.FK_all(arm, q, link_number)
return pos
def FK_all(self, arm, q, link_number = 7):
q = self.meka_angles_to_kdl(arm, q)
frame = self.FK_kdl(arm, q, link_number)
pos = frame.p
pos = ku.kdl_vec_to_np(pos)
m = frame.M
rot = ku.kdl_rot_to_np(m)
return pos, rot
def Jac(self,arm,q):
''' q - list of 7 joint angles (meka axes) in RADIANS.
arm - 'right_arm' or 'left_arm'
returns 6x7 numpy matrix.
'''
jntarr = self.meka_angles_to_kdl(arm,q)
kdl_jac = self.Jac_kdl(arm,jntarr)
meka_jac = -kdl_jac # the kdl jacobian is the negative of meka jacobian (see kdl_angles_to_meka)
return meka_jac
## compute Jacobian at point pos.
# p is in the torso_lift_link coord frame.
def Jacobian(self, arm, q, pos):
chain = self.cody_kdl[arm]['chain']
v_list = []
w_list = []
for i in range(7):
p, rot = self.FK_all(arm, q, i)
r = pos - p
z_idx = chain.getSegment(i).getJoint().getType() - 1
z = rot[:, z_idx]
v_list.append(np.matrix(np.cross(z.A1, r.A1)).T)
w_list.append(z)
J = np.row_stack((np.column_stack(v_list), np.column_stack(w_list)))
#J = -J # the kdl jacobian is the negative of meka jacobian (see kdl_angles_to_meka)
J = self.Jac(arm, q)
return J
    ##
    # Inverse Kinematics using KDL.
    # @param p - 3X1 numpy matrix.
    # @param rot - 3X3 numpy matrix. It transforms a vector in the
    # end effector frame to the torso frame. (or it is the orientation
    # of the end effector wrt the torso)
    # @return list of 7 joint angles, or None if IK soln not found.
    def IK(self,arm,p,rot,q_guess=None):
        # build the desired end-effector frame in KDL form.
        p_kdl = ku.np_vec_to_kdl(p)
        rot_kdl = ku.np_rot_to_kdl(rot)
        fr = kdl.Frame(rot_kdl,p_kdl)
        # no guess given: look one up from the precomputed tables mapping
        # cartesian points to known-good configurations.
        if q_guess == None:
            if arm == 'left_arm':
                q_guess = cgd.find_good_config(p,self.q_guess_dict_left)
            elif arm == 'right_arm':
                q_guess = cgd.find_good_config(p,self.q_guess_dict_right)
        q_guess = self.meka_angles_to_kdl(arm,q_guess)
        q_res = self.IK_kdl(arm,fr,q_guess)
        q_res = self.kdl_angles_to_meka(arm,q_res)
        if self.within_joint_limits(arm,q_res):
            if arm == 'right_arm':
                # shoulder joint 1 came out negative: nudge it to +10 deg
                # and re-run IK to prefer the elbow-up configuration.
                if q_res[1]<0.:
                    q_res[1] = math.radians(10.)
                    qg = self.meka_angles_to_kdl(arm,q_res)
                    q_res = self.IK_kdl(arm,fr,qg)
                    q_res = self.kdl_angles_to_meka(arm,q_res)
                    if self.within_joint_limits(arm,q_res):
                        return q_res
                    else:
                        return None
                else:
                    return q_res
            # NOTE(review): a within-limits 'left_arm' solution falls
            # through here and the function implicitly returns None --
            # this looks unintended; verify against callers.
        else:
            return None
## clamp joint angles to their physical limits.
# @param arm - 'left_arm' or 'right_arm'
# @param q - list of 7 joint angles.
# The joint limits for IK are larger that the physical limits.
def clamp_to_joint_limits(self, arm, q, delta_list=[0.,0.,0.,0.,0.,0.,0.]):
d = self.joint_lim_dict[arm]
max_arr = d['max']
min_arr = d['min']
q_arr = np.array(q)
d_arr = np.array(delta_list)
return np.clip(q_arr, min_arr-d_arr, max_arr+d_arr)
def within_joint_limits(self, arm, q, delta_list=[0.,0.,0.,0.,0.,0.,0.]):
d = self.joint_lim_dict[arm]
max_arr = d['max']
min_arr = d['min']
q_arr = np.array(q)
d_arr = np.array(delta_list)
return np.all((q_arr <= max_arr+d_arr, q_arr >= min_arr-d_arr))
| [
[
1,
0,
0.0934,
0.003,
0,
0.66,
0,
526,
0,
1,
0,
0,
526,
0,
0
],
[
1,
0,
0.0964,
0.003,
0,
0.66,
0.0833,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0994,
0.003,
0,
0.6... | [
"import math",
"import numpy as np",
"import copy",
"import sys, time, os",
"import PyKDL as kdl",
"import create_IK_guess_dict as cgd",
"import roslib; roslib.load_manifest('epc_core')",
"import roslib; roslib.load_manifest('epc_core')",
"import hrl_lib.transforms as tr",
"import hrl_lib.util as ... |
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import arms as ar
import math, numpy as np
import sys, optparse
import roslib; roslib.load_manifest('epc_core')
import hrl_lib.transforms as tr
import hrl_lib.util as ut
def find_good_config(pt,dict):
    ''' Return the recorded joint configuration whose cached FK position
        is closest (by ut.norm) to the 3x1 point pt.
        dict - {'cart_pts_mat': 3xN matrix, 'good_configs_list': list}
    '''
    cart_pts = dict['cart_pts_mat']
    closest_idx = np.argmin(ut.norm(cart_pts - pt))
    return dict['good_configs_list'][closest_idx]
def test_dict(fname):
dict = ut.load_pickle(fname)
firenze = ar.M3HrlRobot(connect=False)
rot = tr.rotY(math.radians(-90))
p = np.matrix([0.4,-0.42,-0.2]).T
c = find_good_config(p,dict)
res = firenze.IK(p,rot,q_guess=c)
print 'IK soln: ', [math.degrees(qi) for qi in res]
def create_dict(fname):
firenze = ar.M3HrlRobot(connect=False)
good_configs_list = ut.load_pickle(fname)
cartesian_points_list = []
for gc in good_configs_list:
cartesian_points_list.append(firenze.FK('right_arm',gc).A1.tolist())
m = np.matrix(cartesian_points_list).T
print 'm.shape:', m.shape
dict = {'cart_pts_mat':m, 'good_configs_list':good_configs_list}
ut.save_pickle(dict,ut.formatted_time()+'_goodconf_dict.pkl')
def record_good_configs(use_left_arm):
    # Put one arm in gravity-compensation (zero stiffness) mode so it can
    # be moved by hand, then record a joint configuration on each ENTER
    # keypress; save the list to a timestamped pickle on exit.
    import m3.toolbox as m3t
    settings_arm = ar.MekaArmSettings(stiffness_list=[0.,0.,0.,0.,0.],control_mode='torque_gc')
    if use_left_arm:
        firenze = ar.M3HrlRobot(connect=True,left_arm_settings=settings_arm)
        arm = 'left_arm'
    else:
        firenze = ar.M3HrlRobot(connect=True,right_arm_settings=settings_arm)
        arm = 'right_arm'
    print 'hit ENTER to start the recording process'
    k=m3t.get_keystroke()
    firenze.power_on()
    good_configs_list = []
    while k == '\r':
        print 'hit ENTER to record configuration, something else to exit'
        k=m3t.get_keystroke()
        # NOTE(review): the config is appended before k is re-checked, so
        # the keystroke that exits the loop still records one final
        # configuration -- confirm this is intended.
        firenze.proxy.step()
        q = firenze.get_joint_angles(arm)
        good_configs_list.append(np.matrix(q).A1.tolist())
    firenze.stop()
    ut.save_pickle(good_configs_list,ut.formatted_time()+'_good_configs_list.pkl')
if __name__=='__main__':
p = optparse.OptionParser()
p.add_option('-r', action='store_true', dest='record',
help='put robot in GC and record good configurations.')
p.add_option('-c', action='store_true', dest='create',
help='create table to map points to good configs. (needs a good_configs pkl)')
p.add_option('-t', action='store_true', dest='test',
help='find a good config for a cartesian point. (needs a dict pkl)')
p.add_option('-f', action='store', type='string', dest='fname',
help='pkl file to use.', default='')
p.add_option('--ra', action='store_true', dest='right',
help='choose the right arm')
p.add_option('--la', action='store_true', dest='left',
help='choose the left arm')
opt, args = p.parse_args()
record = opt.record
create = opt.create
test = opt.test
fname = opt.fname
use_left_arm = opt.left
use_right_arm = opt.right
if test:
if fname == '':
print 'Specify a file name.'
sys.exit()
test_dict(fname)
elif create:
if fname == '':
print 'Specify a file name.'
sys.exit()
create_dict(fname)
elif record:
if use_right_arm == None and use_left_arm == None:
print 'Please specify either left or right arm. (--ra or --la)'
print 'Exiting...'
sys.exit()
record_good_configs(use_left_arm)
else:
print 'Please specify either record or create.'
| [
[
1,
0,
0.2098,
0.007,
0,
0.66,
0,
375,
0,
1,
0,
0,
375,
0,
0
],
[
1,
0,
0.2168,
0.007,
0,
0.66,
0.0909,
526,
0,
2,
0,
0,
526,
0,
0
],
[
1,
0,
0.2238,
0.007,
0,
0.6... | [
"import arms as ar",
"import math, numpy as np",
"import sys, optparse",
"import roslib; roslib.load_manifest('epc_core')",
"import roslib; roslib.load_manifest('epc_core')",
"import hrl_lib.transforms as tr",
"import hrl_lib.util as ut",
"def find_good_config(pt,dict):\n ''' finds a good configura... |
#
# subscribe to the joint angles and raw forces topics, and provide FK
# etc.
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import math
import numpy as np
import copy
import sys, time, os
from threading import RLock
import roslib; roslib.load_manifest('epc_core')
import rospy
import hrl_lib.viz as hv
from hrl_msgs.msg import FloatArray
from roslib.msg import Header
from std_msgs.msg import Bool
from std_msgs.msg import Empty
from std_srvs.srv import Empty as Empty_srv
from visualization_msgs.msg import Marker
# used in client and server.
def get_joint_name_dict():
    """Map 'right_arm'/'left_arm' to their seven Meka joint names."""
    prefixes = {'right_arm': 'm3joint_ma1', 'left_arm': 'm3joint_ma2'}
    return dict((arm, ['%s_j%d' % (prefix, i) for i in range(7)])
                for arm, prefix in prefixes.items())
class MekaArmClient():
    """ROS client for the Meka (Cody) arms.

    Caches the latest joint / force-torque state published by the arm
    server and publishes command messages back. Terminology used
    throughout: 'jep' = joint equilibrium point (list of 7 angles),
    'cep' = cartesian equilibrium point (3x1 position of the jep's FK).
    All cached state is guarded by self.cb_lock.
    """
    ##
    # @param arms: object of the ArmKinematics class.
    def __init__(self, arms):
        self.cb_lock = RLock()
        # latest values filled in by the subscriber callbacks below;
        # None until the first message arrives.
        self.r_arm_jep = None
        self.l_arm_jep = None
        self.r_arm_alpha = None
        self.l_arm_alpha = None
        self.r_arm_q = None
        self.l_arm_q = None
        self.r_arm_force = None
        self.r_arm_raw_force = None
        self.l_arm_force = None
        self.l_arm_raw_force = None
        self.pwr_state = False
        # per-arm force/torque biases, subtracted in get_wrist_force.
        self.left_arm_ft = {'force': np.matrix(np.zeros((3,1),dtype='float32')),
                            'torque': np.matrix(np.zeros((3,1),dtype='float32'))}
        self.right_arm_ft = {'force': np.matrix(np.zeros((3,1),dtype='float32')),
                             'torque': np.matrix(np.zeros((3,1),dtype='float32'))}
        self.fts_bias = {'left_arm': self.left_arm_ft, 'right_arm': self.right_arm_ft}
        self.arms = arms
        self.joint_names_list = get_joint_name_dict()
        self.r_jep_cmd_pub = rospy.Publisher('/r_arm/command/jep', FloatArray)
        self.l_jep_cmd_pub = rospy.Publisher('/l_arm/command/jep', FloatArray)
        self.r_alph_cmd_pub = rospy.Publisher('/r_arm/command/joint_impedance_scale', FloatArray)
        self.l_alph_cmd_pub = rospy.Publisher('/l_arm/command/joint_impedance_scale', FloatArray)
        self.stop_pub = rospy.Publisher('/arms/stop', Empty)
        self.motors_off_pub = rospy.Publisher('/arms/command/motors_off', Empty)
        self.cep_marker_pub = rospy.Publisher('/arms/viz/cep', Marker)
        rospy.Subscriber('/r_arm/jep', FloatArray, self.r_arm_jep_cb)
        rospy.Subscriber('/l_arm/jep', FloatArray, self.l_arm_jep_cb)
        rospy.Subscriber('/r_arm/joint_impedance_scale', FloatArray, self.r_arm_alpha_cb)
        rospy.Subscriber('/l_arm/joint_impedance_scale', FloatArray, self.l_arm_alpha_cb)
        rospy.Subscriber('/r_arm/q', FloatArray, self.r_arm_q_cb)
        rospy.Subscriber('/l_arm/q', FloatArray, self.l_arm_q_cb)
        rospy.Subscriber('/r_arm/force', FloatArray, self.r_arm_force_cb)
        rospy.Subscriber('/l_arm/force', FloatArray, self.l_arm_force_cb)
        rospy.Subscriber('/r_arm/force_raw', FloatArray, self.r_arm_raw_force_cb)
        rospy.Subscriber('/l_arm/force_raw', FloatArray, self.l_arm_raw_force_cb)
        rospy.Subscriber('/arms/pwr_state', Bool, self.pwr_state_cb)
        rospy.wait_for_service('toggle_floating_arms')
        self.toggle_floating_arms = rospy.ServiceProxy('toggle_floating_arms', Empty_srv)
        try:
            rospy.init_node('cody_arm_client', anonymous=True)
        except rospy.ROSException:
            pass
    #---------- ROS callbacks -----------------
    def pwr_state_cb(self, msg):
        self.cb_lock.acquire()
        self.pwr_state = msg.data
        self.cb_lock.release()
    def r_arm_jep_cb(self, msg):
        self.cb_lock.acquire()
        self.r_arm_jep = list(msg.data)
        self.cb_lock.release()
        # publish the CEP marker.
        cep, r = self.arms.FK_all('right_arm', self.r_arm_jep)
        o = np.matrix([0.,0.,0.,1.]).T
        cep_marker = hv.single_marker(cep, o, 'sphere',
                        '/torso_lift_link', color=(0., 0., 1., 1.),
                        scale = (0.02, 0.02, 0.02), duration=0.)
        cep_marker.header.stamp = msg.header.stamp
        self.cep_marker_pub.publish(cep_marker)
    def l_arm_jep_cb(self, msg):
        self.cb_lock.acquire()
        self.l_arm_jep = list(msg.data)
        self.cb_lock.release()
    def r_arm_alpha_cb(self, msg):
        self.cb_lock.acquire()
        self.r_arm_alpha = list(msg.data)
        self.cb_lock.release()
    def l_arm_alpha_cb(self, msg):
        self.cb_lock.acquire()
        self.l_arm_alpha = list(msg.data)
        self.cb_lock.release()
    def r_arm_q_cb(self, msg):
        self.cb_lock.acquire()
        self.r_arm_q = list(msg.data)
        self.cb_lock.release()
    def l_arm_q_cb(self, msg):
        self.cb_lock.acquire()
        self.l_arm_q = list(msg.data)
        self.cb_lock.release()
    def r_arm_force_cb(self, msg):
        self.cb_lock.acquire()
        self.r_arm_force = msg.data
        self.cb_lock.release()
    def l_arm_force_cb(self, msg):
        self.cb_lock.acquire()
        self.l_arm_force = msg.data
        self.cb_lock.release()
    def r_arm_raw_force_cb(self, msg):
        self.cb_lock.acquire()
        self.r_arm_raw_force = msg.data
        self.cb_lock.release()
    def l_arm_raw_force_cb(self, msg):
        self.cb_lock.acquire()
        self.l_arm_raw_force = msg.data
        self.cb_lock.release()
    #--------- functions to use -----------------
    ## Returns the current position, rotation of the arm.
    # @param arm 0 for right, 1 for left
    # @return position, rotation
    def end_effector_pos(self, arm):
        q = self.get_joint_angles(arm)
        return self.arms.FK_all(arm, q)
    ##
    # @return list of 7 joint angles.
    def get_joint_angles(self, arm):
        self.cb_lock.acquire()
        if arm == 'right_arm':
            q = copy.copy(self.r_arm_q)
        elif arm == 'left_arm':
            q = copy.copy(self.l_arm_q)
        else:
            self.cb_lock.release()
            raise RuntimeError('Undefined arm: %s'%(arm))
        self.cb_lock.release()
        return q
    # Returns the wrist force as a 3x1 np matrix; optionally bias-corrected
    # and rotated into the base (torso) frame via FK.
    def get_wrist_force(self, arm, bias=True, base_frame=False,
                        filtered = True):
        self.cb_lock.acquire()
        if arm == 'right_arm':
            if filtered:
                f = copy.copy(self.r_arm_force)
            else:
                f = copy.copy(self.r_arm_raw_force)
        elif arm == 'left_arm':
            if filtered:
                f = copy.copy(self.l_arm_force)
            else:
                f = copy.copy(self.l_arm_raw_force)
        else:
            self.cb_lock.release()
            raise RuntimeError('Undefined arm: %s'%(arm))
        self.cb_lock.release()
        f_mat = np.matrix(f).T
        if bias:
            f_mat = f_mat - self.fts_bias[arm]['force']
        if base_frame:
            q = self.get_joint_angles(arm)
            rot = self.arms.FK_rot(arm, q)
            f_mat = rot*f_mat
        return f_mat
    # Average 20 unbiased force readings and store them as the FT bias.
    def bias_wrist_ft(self, arm):
        f_list = []
        t_list = []
        print 'Starting biasing...'
        for i in range(20):
            f_list.append(self.get_wrist_force(arm, bias=False))
            rospy.sleep(0.02)
        f_b = np.mean(np.column_stack(f_list), 1)
        # torque is unimplemented.
        t_b = self.get_wrist_torque(arm, bias=False)
        self.fts_bias[arm]['force'] = f_b
        self.fts_bias[arm]['torque'] = t_b
        print 'self.fts_bias[arm][\'force\']', self.fts_bias[arm]['force']
        print 'arm:', arm
        print '...done'
    ##
    # @return list of floats b/w 0 and 1.
    def get_impedance_scale(self, arm):
        self.cb_lock.acquire()
        if arm == 'right_arm':
            sc = copy.copy(self.r_arm_alpha)
        elif arm == 'left_arm':
            sc = copy.copy(self.l_arm_alpha)
        else:
            self.cb_lock.release()
            raise RuntimeError('Undefined arm: %s'%(arm))
        self.cb_lock.release()
        return sc
    ##
    # @param s - list of floats b/w 0 and 1.
    def set_impedance_scale(self, arm, s):
        if arm == 'right_arm':
            pub = self.r_alph_cmd_pub
        elif arm == 'left_arm':
            pub = self.l_alph_cmd_pub
        else:
            raise RuntimeError('Undefined arm: %s'%(arm))
        time_stamp = rospy.Time.now()
        h = Header()
        h.stamp = time_stamp
        pub.publish(FloatArray(h, s))
    # Returns the last commanded joint equilibrium point for the arm.
    def get_jep(self, arm):
        self.cb_lock.acquire()
        if arm == 'right_arm':
            jep = copy.copy(self.r_arm_jep)
        elif arm == 'left_arm':
            jep = copy.copy(self.l_arm_jep)
        else:
            self.cb_lock.release()
            raise RuntimeError('Undefined arm: %s'%(arm))
        self.cb_lock.release()
        return jep
    ##
    # @param q - list of 7 joint angles in RADIANS. according to meka's coordinate frames.
    # @param duration - for compatibility with the PR2 class.
    def set_jep(self, arm, q, duration=None):
        if arm == 'right_arm':
            pub = self.r_jep_cmd_pub
        elif arm == 'left_arm':
            pub = self.l_jep_cmd_pub
        else:
            raise RuntimeError('Undefined arm: %s'%(arm))
        time_stamp = rospy.Time.now()
        h = Header()
        h.stamp = time_stamp
        pub.publish(FloatArray(h, q))
    ##
    #Function that commands the arm(s) to incrementally move to a jep
    #@param speed the max angular speed (in radians per second)
    #@return 'reach'
    def go_jep(self, arm, q, stopping_function=None, speed=math.radians(30)):
        # cap commanded speed at 90 deg/s.
        if speed>math.radians(90.):
            speed = math.radians(90.)
        qs_list,qe_list,ns_list,qstep_list = [],[],[],[]
        done_list = []
        time_between_cmds = 0.025
        #qs = np.matrix(self.get_joint_angles(arm))
        qs = np.matrix(self.get_jep(arm))
        qe = np.matrix(q)
        max_change = np.max(np.abs(qe-qs))
        total_time = max_change/speed
        n_steps = int(total_time/time_between_cmds+0.5)
        # NOTE(review): if q equals the current jep, n_steps is 0 and the
        # next line raises ZeroDivisionError -- confirm callers never do
        # this, or guard it.
        qstep = (qe-qs)/n_steps
        if stopping_function != None:
            done = stopping_function()
        else:
            done = False
        step_number = 0
        t0 = rospy.Time.now().to_time()
        t_end = t0
        while done==False:
            t_end += time_between_cmds
            t1 = rospy.Time.now().to_time()
            if stopping_function != None:
                done = stopping_function()
            if step_number < n_steps and done == False:
                q = (qs + step_number*qstep).A1.tolist()
                self.set_jep(arm, q)
            else:
                done = True
            # busy-wait (with short sleeps) until the slot for this command
            # has elapsed, polling the stopping function.
            while t1 < t_end:
                if stopping_function != None:
                    done = done or stopping_function()
                rospy.sleep(time_between_cmds/5)
                t1 = rospy.Time.now().to_time()
            step_number += 1
        rospy.sleep(time_between_cmds)
        return 'reach'
    # Expect this to crash the program because sending a stop crashes
    # the meka server
    def stop(self):
        self.stop_pub.publish()
    def is_motor_power_on(self):
        return self.pwr_state
    # IK to the cartesian pose (p, rot), then go_jep to the solution at an
    # angular speed derived from the requested linear speed.
    def go_cep(self, arm, p, rot, speed = 0.10,
               stopping_function = None, q_guess = None):
        q = self.arms.IK(arm, p, rot, q_guess)
        if q == None:
            print 'IK soln NOT found.'
            print 'trying to reach p= ', p
            return 'fail'
        else:
            q_start = np.matrix(self.get_joint_angles(arm))
            p_start = self.arms.FK(arm, q_start.A1.tolist())
            q_end = np.matrix(q)
            dist = np.linalg.norm(p-p_start)
            total_time = dist/speed
            max_change = np.max(np.abs(q_end-q_start))
            ang_speed = max_change/total_time
            return self.go_jep(arm, q, stopping_function, speed=ang_speed)
    ##
    # linearly interpolates the commanded cep.
    # @param arm - 'left_arm' or 'right_arm'
    # @param p - 3x1 np matrix
    # @param rot - rotation matrix
    # @param speed - linear speed (m/s)
    # @param stopping_function - returns True or False
    # @return string (reason for stopping)
    def go_cep_interpolate(self, arm, p, rot=None, speed=0.10,
                           stopping_function=None):
        rot = None # Rotational interpolation not implemented right now.
        time_between_cmds = 0.025
        q_guess = self.get_jep(arm)
        cep = self.arms.FK(arm, q_guess)
        if rot == None:
            # keep the current orientation throughout the motion.
            rot = self.arms.FK_rot(arm, q_guess)
        vec = p-cep
        dist = np.linalg.norm(vec)
        total_time = dist/speed
        n_steps = int(total_time/time_between_cmds + 0.5)
        # per-step displacement along the straight line to p.
        vec = vec/dist
        vec = vec * speed * time_between_cmds
        pt = cep
        all_done = False
        i = 0
        t0 = rospy.Time.now().to_time()
        t_end = t0
        while all_done==False:
            t_end += time_between_cmds
            t1 = rospy.Time.now().to_time()
            pt = pt + vec
            q = self.arms.IK(arm, pt, rot, q_guess)
            if q == None:
                all_done = True
                stop = 'IK fail'
                continue
            self.set_jep(arm, q)
            q_guess = q
            while t1<t_end:
                if stopping_function != None:
                    all_done = stopping_function()
                if all_done:
                    stop = 'Stopping Condition'
                    break
                rospy.sleep(time_between_cmds/5)
                t1 = rospy.Time.now().to_time()
            i+=1
            if i == n_steps:
                all_done = True
                stop = ''
        return stop
    ##
    # @param vec - displacement vector (base frame)
    # @param q_guess - previous JEP?
    # @return string
    def move_till_hit(self, arm, vec=np.matrix([0.3,0.,0.]).T, force_threshold=3.0,
                      speed=0.1, bias_FT=True):
        unit_vec = vec/np.linalg.norm(vec)
        # stop when the reaction force opposing the motion exceeds the
        # threshold, or the force magnitude exceeds a 45 N safety limit.
        def stopping_function():
            force = self.get_wrist_force(arm, base_frame = True)
            force_projection = force.T*unit_vec *-1 # projection in direction opposite to motion.
            if force_projection>force_threshold:
                return True
            elif np.linalg.norm(force)>45.:
                return True
            else:
                return False
        jep = self.get_jep(arm)
        cep, rot = self.arms.FK_all(arm, jep)
        if bias_FT:
            self.bias_wrist_ft(arm)
        time.sleep(0.5)
        p = cep + vec
        return self.go_cep_interpolate(arm, p, rot, speed,
                                       stopping_function)
    def motors_off(self):
        self.motors_off_pub.publish()
#    def step(self):
#        rospy.sleep(0.01)
    #-------- unimplemented functions -----------------
    # leaving this unimplemented for now. Advait Nov 14, 2010.
    def get_joint_velocities(self, arm):
        pass
    # leaving this unimplemented for now. Advait Nov 14, 2010.
    def get_joint_accelerations(self, arm):
        pass
    # leaving this unimplemented for now. Advait Nov 14, 2010.
    def get_joint_torques(self, arm):
        pass
    # leaving this unimplemented for now. Advait Nov 14, 2010.
    def get_wrist_torque(self, arm, bias=True):
        pass
    # leaving this unimplemented for now. Advait Nov 14, 2010.
    def power_on(self):
        pass
    # leaving this unimplemented for now. Advait Nov 14, 2010.
    # something to change and get arm_settings.
if __name__ == '__main__':
    # Manual hardware test harness: flip the 'if False/True' guards below
    # to choose which demo runs against the real robot.
    import arms as ar
    import m3.toolbox as m3t
    import hrl_lib.transforms as tr
    r_arm = 'right_arm'
    l_arm = 'left_arm'
    arms = ar.M3HrlRobot()
    ac = MekaArmClient(arms)
    # print FT sensor readings.
    if False:
        ac.bias_wrist_ft(r_arm)
        while not rospy.is_shutdown():
            f = ac.get_wrist_force(r_arm)
            print 'f:', f.A1
            rospy.sleep(0.05)
    # move the arms.
    if False:
        print 'hit a key to move the arms.'
        k=m3t.get_keystroke()
        rot_mat = tr.rotY(math.radians(-90))
        p = np.matrix([0.3, -0.24, -0.3]).T
        # q = arms.IK(l_arm, p, rot_mat)
        # ac.go_jep(l_arm, q)
        # ac.go_cep(l_arm, p, rot_mat)
        ac.go_cep(r_arm, p, rot_mat)
        # jep = ac.get_jep(r_arm)
        # ac.go_jep(r_arm, jep)
        rospy.sleep(0.5)
        raw_input('Hit ENTER to print ee position')
        q = ac.get_joint_angles(r_arm)
        ee = ac.arms.FK(r_arm, q)
        print 'ee:', ee.A1
        print 'desired ee:', p.A1
    # toggle the gravity-compensated "floating" mode on and off.
    if False:
        print 'hit a key to float arms.'
        k=m3t.get_keystroke()
        ac.toggle_floating_arms()
        print 'hit a key to UNfloat arms.'
        k=m3t.get_keystroke()
        ac.toggle_floating_arms()
        #ac.move_till_hit(l_arm)
        #ac.motors_off()
        # ac.stop()
    # continuously print the commanded jep.
    if False:
        while not rospy.is_shutdown():
            jep = ac.get_jep(r_arm)
            print 'jep:', jep
            rospy.sleep(0.05)
    # read, modify and write back the joint impedance scale.
    if True:
        rospy.sleep(1.)
        isc =  ac.get_impedance_scale(r_arm)
        print isc
        isc[1] = 0.3
        ac.set_impedance_scale(r_arm, isc)
        rospy.sleep(1.)
        isc =  ac.get_impedance_scale(r_arm)
        print isc
| [
[
1,
0,
0.0572,
0.0017,
0,
0.66,
0,
526,
0,
1,
0,
0,
526,
0,
0
],
[
1,
0,
0.0589,
0.0017,
0,
0.66,
0.0588,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0607,
0.0017,
0,
... | [
"import math",
"import numpy as np",
"import copy",
"import sys, time, os",
"from threading import RLock",
"import roslib; roslib.load_manifest('epc_core')",
"import roslib; roslib.load_manifest('epc_core')",
"import rospy",
"import hrl_lib.viz as hv",
"from hrl_msgs.msg import FloatArray",
"fro... |
import numpy as np, math
import roslib; roslib.load_manifest('epc_core')
import rospy
from std_msgs.msg import Bool
## Class defining the core EPC function and a few simple examples.
# More complex behaviors that use EPC should have their own ROS
# packages.
class EPC():
    """Equilibrium Point Control driver: repeatedly asks a generator for
    the next equilibrium point and hands it to a control function, until
    the generator, a ROS stop message, or a timeout ends the motion.
    """
    def __init__(self, epc_name = 'epc'):
        self.stop_epc = False
        self.pause_epc = False
        rospy.Subscriber('/'+epc_name+'/stop', Bool, self.stop_cb)
        rospy.Subscriber('/'+epc_name+'/pause', Bool, self.pause_cb)

    def stop_cb(self, msg):
        self.stop_epc = msg.data
        self.pause_epc = False # stop/start overrides pause.

    def pause_cb(self, msg):
        self.pause_epc = msg.data

    ##
    # @param ep_gen_func: function that returns stop, ea where ea: equilibrium angles and stop: string which is '' for epc motion to continue
    # @param ep_gen_state: opaque state object passed to ep_gen_func each step
    # @param time_step: time between successive calls to ep_gen_func
    # @param control_function: called with *ea to command the arm
    # @param ep_clamp_func: optional; applied to ea[0] before commanding
    # @param timeout - time after which the epc motion will stop.
    # @return stop (the string which has the reason why the epc
    # motion stopped.), ea (last commanded equilibrium angles)
    def epc_motion(self, ep_gen_func, ep_gen_state, time_step,
                   control_function=None, ep_clamp_func=None,
                   timeout=np.inf):
        rt = rospy.Rate(1/time_step)
        timeout_at = rospy.get_time() + timeout
        stop = ''
        ea = None
        while stop == '':
            if rospy.is_shutdown():
                stop = 'rospy shutdown'
                continue
            if self.stop_epc:
                stop = 'stop_command_over_ROS'
                continue
            if self.pause_epc:
                rospy.sleep(0.1)
                timeout_at += 0.101 # approximate.
                continue
            if timeout_at < rospy.get_time():
                stop = 'timed out'
            if stop == '':
                # BUG FIX: the body referenced 'equi_pt_generator', a name
                # from an older signature; the parameter is ep_gen_func.
                stop, ea = ep_gen_func(ep_gen_state)
                if stop == 'reset timing':
                    stop = ''
            if stop == '':
                # BUG FIX: was 'clamp_func' (NameError); parameter is
                # ep_clamp_func. Clamp only the first element of ea.
                if ep_clamp_func is not None:
                    ep = ea[0]
                    ea = list(ea)
                    ea[0] = ep_clamp_func(ep)
                    ea = tuple(ea)
                control_function(*ea)
            rt.sleep()
        return stop, ea
| [
[
1,
0,
0.0263,
0.0132,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0395,
0.0132,
0,
0.66,
0.2,
796,
0,
1,
0,
0,
796,
0,
0
],
[
8,
0,
0.0395,
0.0132,
0,
0.6... | [
"import numpy as np, math",
"import roslib; roslib.load_manifest('epc_core')",
"import roslib; roslib.load_manifest('epc_core')",
"import rospy",
"from std_msgs.msg import Bool",
"class EPC():\n def __init__(self, epc_name = 'epc'):\n self.stop_epc = False\n self.pause_epc = False\n ... |
#
# Any robot that wants to use EPC should implement the functions
# sketched out in the HRLArm and HRLArmKinematics
#
import numpy as np, math
class HRLArm():
    """Abstract robot-arm interface for EPC; subclasses implement the
    state-access and equilibrium-point methods below.
    """
    def __init__(self, kinematics):
        # object of class derived from HRLArmKinematics
        self.kinematics = kinematics
        self.ep = None # equilibrium point

    #------- abstract functions ---------
    # BUG FIX: was defined without 'self', so calling arm.get_joint_angles()
    # raised TypeError instead of the intended RuntimeError.
    def get_joint_angles(self):
        raise RuntimeError('Unimplemented Function')

    def set_ep(self, *args):
        raise RuntimeError('Unimplemented Function')

    def get_ep(self):
        raise RuntimeError('Unimplemented Function')

    def viz_ep(self, ep):
        raise RuntimeError('Unimplemented Function')

    def freeze(self):
        # hold position by commanding the current equilibrium point.
        self.set_ep(self.get_ep())

    def get_end_effector_pose(self):
        return self.kinematics.FK(self.get_joint_angles())
class HRLArmKinematics():
    """Abstract kinematics interface; concrete arms implement FK, IK and
    the Jacobian. Holds the tooltip transform (wrist coordinate frame).
    """
    def __init__(self):
        # tooltip position (3x1) and orientation (3x3) in the wrist frame.
        self.tooltip_pos = np.matrix([0.,0.,0.]).T
        self.tooltip_rot = np.matrix(np.eye(3))

    # @param q - array-like (RADIANs)
    # @param link_number - perform FK up to this link. (1-7)
    # @return pos (3X1) np matrix, rot (3X3) np matrix
    def FK(self, q, link_number=None):
        raise RuntimeError('Unimplemented Function')

    def IK(self, p, rot, q_guess=None):
        raise RuntimeError('Unimplemented Function')

    ## compute Jacobian at point pos.
    def Jacobian(self, q, pos=None):
        raise RuntimeError('Unimplemented Function')

    ## compute Jacobian at point pos.
    def jacobian(self, q, pos=None):
        raise RuntimeError('Unimplemented Function')

    ## define tooltip as a 3x1 np matrix in the wrist coord frame.
    # NOTE(review): the extra 'arm' parameter is inconsistent with the other
    # methods here (which have no arm concept); kept for caller compatibility.
    def set_tooltip(self, arm, p, rot=np.matrix(np.eye(3))):
        self.tooltip_pos = p
        self.tooltip_rot = rot
| [
[
1,
0,
0.1094,
0.0156,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
3,
0,
0.3359,
0.375,
0,
0.66,
0.5,
794,
0,
7,
0,
0,
0,
0,
8
],
[
2,
1,
0.1953,
0.0625,
1,
0.37,
... | [
"import numpy as np, math",
"class HRLArm():\n def __init__(self, kinematics):\n # object of class derived from HRLArmKinematics\n self.kinematics = kinematics\n self.ep = None # equilibrium point\n\n #------- abstract functions ---------\n def get_joint_angles():",
" def __init... |
#
# subscribe to the joint angles and raw forces topics, and provide FK
# etc.
#
# Copyright (c) 2009, Georgia Tech Research Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Georgia Tech Research Corporation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY GEORGIA TECH RESEARCH CORPORATION ''AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL GEORGIA TECH BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Author: Advait Jain
import numpy as np, math
from threading import RLock, Timer
import sys, copy
import roslib; roslib.load_manifest('epc_core')
roslib.load_manifest('force_torque') # hack by Advait
import force_torque.FTClient as ftc
import tf
import hrl_lib.transforms as tr
import hrl_lib.viz as hv
import rospy
import PyKDL as kdl
import actionlib
from actionlib_msgs.msg import GoalStatus
from pr2_controllers_msgs.msg import JointTrajectoryAction, JointTrajectoryGoal, JointTrajectoryControllerState
from pr2_controllers_msgs.msg import Pr2GripperCommandGoal, Pr2GripperCommandAction, Pr2GripperCommand
from kinematics_msgs.srv import GetPositionIK, GetPositionIKRequest, GetPositionIKResponse
from kinematics_msgs.srv import GetPositionFK, GetPositionFKRequest, GetPositionFKResponse
from trajectory_msgs.msg import JointTrajectoryPoint
from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion
from teleop_controllers.msg import JTTeleopControllerState
from std_msgs.msg import Float64
from sensor_msgs.msg import JointState
import hrl_lib.transforms as tr
import hrl_lib.kdl_utils as ku
import time
from visualization_msgs.msg import Marker
node_name = "pr2_arms"

def log(msg):
    '''Log msg to rosout, prefixed with this node's name.

    @param msg string to log (renamed from ``str`` to avoid shadowing the builtin)
    '''
    rospy.loginfo(node_name + ": " + msg)
class PR2Arms():
    '''High-level interface to both PR2 arms.

    Subscribes to /joint_states and the JT (Jacobian-transpose) Cartesian
    controller state topics, and exposes:
      * FK/IK via the pr2_arm_kinematics ROS services and a local KDL chain,
      * joint-space equilibrium point (JEP) control via joint trajectory actions,
      * Cartesian equilibrium point (CEP) control via the JT controller,
      * gripper open/close actions,
      * wrist force readings (ATI sensor or controller estimate).

    Arm index convention throughout: 0 = right arm, 1 = left arm.
    Several methods are right-arm only and raise/log for arm != 0.
    '''

    def __init__(self, primary_ft_sensor=None):
        '''
        @param primary_ft_sensor 'ati' or 'estimate'; selects which sensor
               backs get_wrist_force. NOTE(review): if None (the default),
               get_wrist_force is never bound and calling it raises
               AttributeError -- confirm callers always pass a value.
        '''
        log("Loading PR2Arms")
        self.arms = PR2Arms_kdl()  # local KDL chain, used for FK_all / Jacobian.
        self.joint_names_list = [['r_shoulder_pan_joint',
                                  'r_shoulder_lift_joint', 'r_upper_arm_roll_joint',
                                  'r_elbow_flex_joint', 'r_forearm_roll_joint',
                                  'r_wrist_flex_joint', 'r_wrist_roll_joint'],
                                 ['l_shoulder_pan_joint',
                                  'l_shoulder_lift_joint', 'l_upper_arm_roll_joint',
                                  'l_elbow_flex_joint', 'l_forearm_roll_joint',
                                  'l_wrist_flex_joint', 'l_wrist_roll_joint']]
        # one lock per arm guards arm_angles / arm_efforts / jep below.
        self.arm_state_lock = [RLock(), RLock()]
        self.jep = [None, None]          # last commanded joint equilibrium points
        self.arm_angles = [None, None]   # latest joint angles from /joint_states
        self.torso_position = None
        self.arm_efforts = [None, None]  # latest joint efforts from /joint_states

        self.r_arm_cart_pub = rospy.Publisher('/r_cart/command_pose', PoseStamped)
        self.l_arm_cart_pub = rospy.Publisher('/l_cart/command_pose', PoseStamped)
        rospy.Subscriber('/r_cart/state', JTTeleopControllerState, self.r_cart_state_cb)
        rospy.Subscriber('/l_cart/state', JTTeleopControllerState, self.l_cart_state_cb)
        rospy.Subscriber('/joint_states', JointState, self.joint_states_cb)
        self.marker_pub = rospy.Publisher('/pr2_arms/viz_markers', Marker)
        self.cep_marker_id = 1

        self.r_arm_ftc = ftc.FTClient('force_torque_ft2')
        self.r_arm_ftc_estimate = ftc.FTClient('force_torque_ft2_estimate')
        self.tf_lstnr = tf.TransformListener()

        if primary_ft_sensor == 'ati':
            self.get_wrist_force = self.get_wrist_force_ati
        if primary_ft_sensor == 'estimate':
            self.get_wrist_force = self.get_wrist_force_estimate

        r_action_client = actionlib.SimpleActionClient('r_arm_controller/joint_trajectory_action',
                                                       JointTrajectoryAction)
        l_action_client = actionlib.SimpleActionClient('l_arm_controller/joint_trajectory_action',
                                                       JointTrajectoryAction)
        self.joint_action_client = [r_action_client, l_action_client]

        r_gripper_client = actionlib.SimpleActionClient('r_gripper_controller/gripper_action',
                                                        Pr2GripperCommandAction)
        l_gripper_client = actionlib.SimpleActionClient('l_gripper_controller/gripper_action',
                                                        Pr2GripperCommandAction)
        self.gripper_action_client = [r_gripper_client, l_gripper_client]

        r_ik_srv = rospy.ServiceProxy('pr2_right_arm_kinematics/get_ik', GetPositionIK)
        l_ik_srv = rospy.ServiceProxy('pr2_left_arm_kinematics/get_ik', GetPositionIK)
        self.ik_srv = [r_ik_srv, l_ik_srv]
        r_fk_srv = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk', GetPositionFK)
        l_fk_srv = rospy.ServiceProxy('pr2_left_arm_kinematics/get_fk', GetPositionFK)
        self.fk_srv = [r_fk_srv, l_fk_srv]

        rospy.sleep(2.)
        # arm action servers are deliberately not waited on here:
        # self.joint_action_client[0].wait_for_server()
        # self.joint_action_client[1].wait_for_server()
        self.gripper_action_client[0].wait_for_server()
        self.gripper_action_client[1].wait_for_server()
        log("Finished loading SimpleArmManger")

    ##
    # Callback for the /joint_states topic. Updates current joint
    # angles and efforts for both arms and the torso position.
    # @param data JointState message received from the /joint_states topic
    def joint_states_cb(self, data):
        arm_angles = [[], []]
        arm_efforts = [[], []]
        r_jt_idx_list = [0] * 7
        l_jt_idx_list = [0] * 7
        for i, jt_nm in enumerate(self.joint_names_list[0]):
            r_jt_idx_list[i] = data.name.index(jt_nm)
        for i, jt_nm in enumerate(self.joint_names_list[1]):
            l_jt_idx_list[i] = data.name.index(jt_nm)

        for i in range(7):
            idx = r_jt_idx_list[i]
            if data.name[idx] != self.joint_names_list[0][i]:
                # BUGFIX: original formatted the message with an undefined
                # name 'nm', which would raise NameError instead.
                raise RuntimeError('joint angle name does not match. Expected: %s, Actual: %s i: %d' % (self.joint_names_list[0][i], data.name[idx], i))
            arm_angles[0].append(data.position[idx])
            arm_efforts[0].append(data.effort[idx])

            idx = l_jt_idx_list[i]
            if data.name[idx] != self.joint_names_list[1][i]:
                raise RuntimeError('joint angle name does not match. Expected: %s, Actual: %s i: %d' % (self.joint_names_list[1][i], data.name[idx], i))
            #ang = tr.angle_within_mod180(data.position[idx])
            ang = data.position[idx]
            arm_angles[1] += [ang]
            arm_efforts[1] += [data.effort[idx]]

        self.arm_state_lock[0].acquire()
        self.arm_angles[0] = arm_angles[0]
        self.arm_efforts[0] = arm_efforts[0]
        torso_idx = data.name.index('torso_lift_joint')
        self.torso_position = data.position[torso_idx]
        self.arm_state_lock[0].release()

        self.arm_state_lock[1].acquire()
        self.arm_angles[1] = arm_angles[1]
        self.arm_efforts[1] = arm_efforts[1]
        self.arm_state_lock[1].release()

    ##
    # Callback for the right JT controller state. Caches the current
    # end-effector pose (with a fixed 0.12m tool offset) and the
    # controller's filtered desired pose (the CEP), and publishes a
    # visualization sphere at the end effector.
    def r_cart_state_cb(self, msg):
        try:
            trans, quat = self.tf_lstnr.lookupTransform('/torso_lift_link',
                                                        'r_gripper_tool_frame', rospy.Time(0))
            rot = tr.quaternion_to_matrix(quat)
            tip = np.matrix([0.12, 0., 0.]).T
            self.r_ee_pos = rot * tip + np.matrix(trans).T
            self.r_ee_rot = rot

            marker = Marker()
            marker.header.frame_id = 'torso_lift_link'
            time_stamp = rospy.Time.now()
            marker.header.stamp = time_stamp
            marker.ns = 'aloha land'
            marker.type = Marker.SPHERE
            marker.action = Marker.ADD
            marker.pose.position.x = self.r_ee_pos[0, 0]
            marker.pose.position.y = self.r_ee_pos[1, 0]
            marker.pose.position.z = self.r_ee_pos[2, 0]
            size = 0.02
            marker.scale.x = size
            marker.scale.y = size
            marker.scale.z = size
            marker.lifetime = rospy.Duration()
            marker.id = 71
            marker.pose.orientation.x = 0
            marker.pose.orientation.y = 0
            marker.pose.orientation.z = 0
            marker.pose.orientation.w = 1
            color = (0.5, 0., 0.0)
            marker.color.r = color[0]
            marker.color.g = color[1]
            marker.color.b = color[2]
            marker.color.a = 1.
            self.marker_pub.publish(marker)

            ros_pt = msg.x_desi_filtered.pose.position
            x, y, z = ros_pt.x, ros_pt.y, ros_pt.z
            self.r_cep_pos = np.matrix([x, y, z]).T
            pt = rot.T * (np.matrix([x, y, z]).T - np.matrix(trans).T)
            pt = pt + tip
            self.r_cep_pos_hooktip = rot * pt + np.matrix(trans).T
            ros_quat = msg.x_desi_filtered.pose.orientation
            quat = (ros_quat.x, ros_quat.y, ros_quat.z, ros_quat.w)
            self.r_cep_rot = tr.quaternion_to_matrix(quat)
        except Exception:
            # tf lookup can fail transiently (e.g. before the tree is
            # available); best-effort callback, so just skip this message.
            # (narrowed from a bare except: so Ctrl-C etc. still propagate)
            return

    ## Callback for the left JT controller state: caches the left CEP only.
    def l_cart_state_cb(self, msg):
        ros_pt = msg.x_desi_filtered.pose.position
        self.l_cep_pos = np.matrix([ros_pt.x, ros_pt.y, ros_pt.z]).T
        ros_quat = msg.x_desi_filtered.pose.orientation
        quat = (ros_quat.x, ros_quat.y, ros_quat.z, ros_quat.w)
        self.l_cep_rot = tr.quaternion_to_matrix(quat)

    ## Inverse kinematics via the pr2_arm_kinematics service.
    # @param arm 0 for right, 1 for left
    # @param pos [x, y, z] in torso_lift_link
    # @param rot 3x3 rotation matrix
    # @param q_guess seed configuration (7 angles); defaults to all zeros
    # @return joint angle tuple from the solver (empty on failure)
    def IK(self, arm, pos, rot, q_guess=None):
        if q_guess is None:  # avoid a mutable default argument
            q_guess = [0] * 7
        ik_req = GetPositionIKRequest()
        ik_req.timeout = rospy.Duration(5.)
        if arm == 0:
            ik_req.ik_request.ik_link_name = 'r_wrist_roll_link'
        else:
            ik_req.ik_request.ik_link_name = 'l_wrist_roll_link'
        ik_req.ik_request.pose_stamped.header.frame_id = 'torso_lift_link'
        quat = tr.matrix_to_quaternion(rot)
        ik_req.ik_request.pose_stamped.pose = Pose(Point(*pos), Quaternion(*quat))
        ik_req.ik_request.ik_seed_state.joint_state.position = q_guess
        ik_req.ik_request.ik_seed_state.joint_state.name = self.joint_names_list[arm]
        ik_resp = self.ik_srv[arm].call(ik_req)
        return ik_resp.solution.joint_state.position

    ## Forward kinematics via the pr2_arm_kinematics service.
    # @param arm 0 for right, 1 for left
    # @param q list of 7 joint angles
    # @return (pos list, 3x3 rotation matrix) in torso_lift_link,
    #         or (None, None) on solver failure
    def FK(self, arm, q):
        fk_req = GetPositionFKRequest()
        fk_req.header.frame_id = 'torso_lift_link'
        if arm == 0:
            fk_req.fk_link_names.append('r_wrist_roll_link')
        else:
            fk_req.fk_link_names.append('l_wrist_roll_link')
        fk_req.robot_state.joint_state.name = self.joint_names_list[arm]
        fk_req.robot_state.joint_state.position = q
        fk_resp = self.fk_srv[arm].call(fk_req)
        if fk_resp.error_code.val == fk_resp.error_code.SUCCESS:
            x = fk_resp.pose_stamped[0].pose.position.x
            y = fk_resp.pose_stamped[0].pose.position.y
            z = fk_resp.pose_stamped[0].pose.position.z
            pos = [x, y, z]
            q1 = fk_resp.pose_stamped[0].pose.orientation.x
            q2 = fk_resp.pose_stamped[0].pose.orientation.y
            q3 = fk_resp.pose_stamped[0].pose.orientation.z
            q4 = fk_resp.pose_stamped[0].pose.orientation.w
            quat = [q1, q2, q3, q4]
            rot = tr.quaternion_to_matrix(quat)
        else:
            rospy.logerr('Forward kinematics failed')
            return None, None
        return pos, rot

    ## Returns the current position, rotation of the arm (local KDL FK).
    # @param arm 0 for right, 1 for left
    # @return position, rotation
    def end_effector_pos(self, arm):
        q = self.get_joint_angles(arm)
        return self.arms.FK_all(arm, q)

    ## Returns the list of 7 joint angles.
    # Continuous joints (forearm roll, wrist roll) are wrapped to [-pi, pi].
    # @param arm 0 for right, 1 for left (anything else is treated as right)
    # @return list of 7 joint angles
    def get_joint_angles(self, arm):
        if arm != 1:
            arm = 0
        self.arm_state_lock[arm].acquire()
        # BUGFIX: copy before wrapping -- wrap_angles mutates its argument
        # in place, and self.arm_angles is shared with joint_states_cb.
        q = self.wrap_angles(list(self.arm_angles[arm]))
        self.arm_state_lock[arm].release()
        return q

    ## Command a joint equilibrium point via the joint trajectory action,
    # and publish a marker at the corresponding Cartesian EP.
    # @param arm 0 for right, 1 for left
    # @param q list of 7 joint angles
    # @param duration time allotted to reach q
    # @raise RuntimeError if q is None or not of length 7
    def set_jep(self, arm, q, duration=0.15):
        if q is None or len(q) != 7:
            raise RuntimeError("set_jep value is " + str(q))
        self.arm_state_lock[arm].acquire()
        jtg = JointTrajectoryGoal()
        jtg.trajectory.joint_names = self.joint_names_list[arm]
        jtp = JointTrajectoryPoint()
        jtp.positions = q
        #jtp.velocities = [0 for i in range(len(q))]
        #jtp.accelerations = [0 for i in range(len(q))]
        jtp.time_from_start = rospy.Duration(duration)
        jtg.trajectory.points.append(jtp)
        self.joint_action_client[arm].send_goal(jtg)
        self.jep[arm] = q
        cep, r = self.arms.FK_all(arm, q)
        self.arm_state_lock[arm].release()
        o = np.matrix([0., 0., 0., 1.]).T
        cep_marker = hv.single_marker(cep, o, 'sphere',
                                      '/torso_lift_link', color=(0., 0., 1., 1.),
                                      scale=(0.02, 0.02, 0.02),
                                      m_id=self.cep_marker_id)
        cep_marker.header.stamp = rospy.Time.now()
        self.marker_pub.publish(cep_marker)

    ## Return a copy of the last commanded joint equilibrium point.
    def get_jep(self, arm):
        self.arm_state_lock[arm].acquire()
        jep = copy.copy(self.jep[arm])
        self.arm_state_lock[arm].release()
        return jep

    ## End-effector pose as tracked from the JT controller callbacks.
    # NOTE(review): left-arm attributes (l_ee_pos/l_ee_rot) are never set by
    # any callback here, so arm=1 raises AttributeError -- confirm.
    def get_ee_jtt(self, arm):
        if arm == 0:
            return self.r_ee_pos, self.r_ee_rot
        else:
            return self.l_ee_pos, self.l_ee_rot

    ## Cartesian equilibrium point as tracked from the JT controller callbacks.
    # @param hook_tip if True (right arm only), return the CEP shifted to the
    #        hook tip frame instead of the gripper frame.
    def get_cep_jtt(self, arm, hook_tip=False):
        if arm == 0:
            if hook_tip:
                return self.r_cep_pos_hooktip, self.r_cep_rot
            else:
                return self.r_cep_pos, self.r_cep_rot
        else:
            return self.l_cep_pos, self.l_cep_rot

    # set a cep using the Jacobian Transpose controller.
    # @param p 3x1 np matrix in torso_lift_link
    # @param rot 3x3 rotation matrix; defaults to the current CEP rotation
    def set_cep_jtt(self, arm, p, rot=None):
        if arm != 1:
            arm = 0
        ps = PoseStamped()
        ps.header.stamp = rospy.rostime.get_rostime()
        ps.header.frame_id = 'torso_lift_link'
        ps.pose.position.x = p[0, 0]
        ps.pose.position.y = p[1, 0]
        ps.pose.position.z = p[2, 0]
        # BUGFIX: must be 'is None' -- '== None' on an np.matrix compares
        # elementwise and raises on truth-value evaluation.
        if rot is None:
            if arm == 0:
                rot = self.r_cep_rot
            else:
                rot = self.l_cep_rot
        quat = tr.matrix_to_quaternion(rot)
        ps.pose.orientation.x = quat[0]
        ps.pose.orientation.y = quat[1]
        ps.pose.orientation.z = quat[2]
        ps.pose.orientation.w = quat[3]
        if arm == 0:
            self.r_arm_cart_pub.publish(ps)
        else:
            self.l_arm_cart_pub.publish(ps)

    # Linearly interpolate the CEP to p in step_size increments.
    # rotational interpolation unimplemented.
    def go_cep_jtt(self, arm, p):
        step_size = 0.01
        sleep_time = 0.1
        cep_p, cep_rot = self.get_cep_jtt(arm)
        unit_vec = (p - cep_p)
        unit_vec = unit_vec / np.linalg.norm(unit_vec)
        while np.linalg.norm(p - cep_p) > step_size:
            cep_p += unit_vec * step_size
            self.set_cep_jtt(arm, cep_p)
            rospy.sleep(sleep_time)
        self.set_cep_jtt(arm, p)
        rospy.sleep(sleep_time)

    #----------- forces ------------

    # force that is being applied on the wrist. (estimate as returned
    # by the cartesian controller). Right arm only.
    # @param bias if True, subtract the sensor bias
    # @param base_frame if True, rotate the force into torso_lift_link
    # @return 3x1 np matrix of force
    def get_wrist_force_estimate(self, arm, bias=True, base_frame=False):
        if arm != 0:
            rospy.logerr('Unsupported arm: %d' % arm)
            raise RuntimeError('Unimplemented function')
        f = self.r_arm_ftc_estimate.read(without_bias=not bias)
        f = f[0:3, :]
        if base_frame:
            trans, quat = self.tf_lstnr.lookupTransform('/torso_lift_link',
                                                        '/ft2_estimate', rospy.Time(0))
            rot = tr.quaternion_to_matrix(quat)
            f = rot * f
        return -f  # the negative is intentional (Advait, Nov 24. 2010.)

    # force that is being applied on the wrist (ATI sensor). Right arm only.
    # @param bias if True, subtract the sensor bias
    # @param base_frame if True, rotate the force into torso_lift_link
    # @return 3x1 np matrix of force
    def get_wrist_force_ati(self, arm, bias=True, base_frame=False):
        if arm != 0:
            rospy.logerr('Unsupported arm: %d' % arm)
            raise RuntimeError('Unimplemented function')
        f = self.r_arm_ftc.read(without_bias=not bias)
        f = f[0:3, :]
        if base_frame:
            trans, quat = self.tf_lstnr.lookupTransform('/torso_lift_link',
                                                        '/ft2', rospy.Time(0))
            rot = tr.quaternion_to_matrix(quat)
            f = rot * f
        return -f  # the negative is intentional (Advait, Nov 24. 2010.)

    ## Estimate the end-effector force from the measured joint torques,
    # using the pseudo-inverse of the Jacobian transpose.
    # @param arm 0 for right, 1 for left (anything else is treated as right)
    # @return 3x1 np matrix of force
    def get_force_from_torques(self, arm):
        if arm != 1:
            arm = 0
        self.arm_state_lock[arm].acquire()
        q = self.arm_angles[arm]
        tau = self.arm_efforts[arm]
        self.arm_state_lock[arm].release()
        p, _ = self.arms.FK_all(arm, q)
        J = self.arms.Jacobian(arm, q, p)
        f = np.linalg.pinv(J.T) * np.matrix(tau).T
        f = f[0:3, :]
        return -f

    ## Re-bias both wrist FT sensors (right arm only).
    def bias_wrist_ft(self, arm):
        if arm != 0:
            rospy.logerr('Unsupported arm: %d' % arm)
            raise RuntimeError('Unimplemented function')
        self.r_arm_ftc.bias()
        self.r_arm_ftc_estimate.bias()

    #-------- gripper functions ------------

    ## Command the gripper to a given opening.
    # @param amount gripper opening in meters
    # @param effort max effort; -1 means unlimited
    def move_gripper(self, arm, amount=0.08, effort=15):
        self.gripper_action_client[arm].send_goal(
            Pr2GripperCommandGoal(Pr2GripperCommand(position=amount,
                                                    max_effort=effort)))

    ## Open the gripper
    # @param arm 0 for right, 1 for left
    def open_gripper(self, arm):
        self.move_gripper(arm, 0.08, -1)

    ## Close the gripper
    # @param arm 0 for right, 1 for left
    def close_gripper(self, arm, effort=15):
        self.move_gripper(arm, 0.0, effort)

    ## Wrap the continuous joints (indices 4 and 6) into [-pi, pi].
    # NOTE: mutates q in place and also returns it.
    # @param q list of 7 joint angles
    # @return the same list with wrapped angles
    def wrap_angles(self, q):
        for ind in [4, 6]:
            while q[ind] < -np.pi:
                q[ind] += 2 * np.pi
            while q[ind] > np.pi:
                q[ind] -= 2 * np.pi
        return q
##
# using KDL for pr2 arm kinematics.
class PR2Arms_kdl():
    '''PR2 right-arm kinematics using a hand-built KDL chain.

    Provides forward kinematics, a wrist Jacobian (KDL) and a Jacobian at an
    arbitrary point (hand computed), a settable tooltip offset, and joint
    limit checks. Only arm index 0 (right arm) is supported; left-arm calls
    log an error, return None or raise.
    '''

    def __init__(self):
        self.right_chain = self.create_right_chain()
        fk, ik_v, ik_p, jac = self.create_solvers(self.right_chain)
        self.right_fk = fk
        self.right_ik_v = ik_v
        self.right_ik_p = ik_p
        self.right_jac = jac
        # tooltip offset, 3x1 np matrix in the wrist coordinate frame
        self.right_tooltip = np.matrix([0., 0., 0.]).T

    ## Build the 7-joint KDL chain for the right arm (link lengths hard-coded).
    def create_right_chain(self):
        ch = kdl.Chain()
        # chain is rooted at the shoulder; this offset takes FK results
        # back into the torso_lift_link frame.
        self.right_arm_base_offset_from_torso_lift_link = np.matrix([0., -0.188, 0.]).T
        # shoulder pan
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotZ), kdl.Frame(kdl.Vector(0.1, 0., 0.))))
        # shoulder lift
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY), kdl.Frame(kdl.Vector(0., 0., 0.))))
        # upper arm roll
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX), kdl.Frame(kdl.Vector(0.4, 0., 0.))))
        # elbow flex
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY), kdl.Frame(kdl.Vector(0.0, 0., 0.))))
        # forearm roll
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX), kdl.Frame(kdl.Vector(0.321, 0., 0.))))
        # wrist flex
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotY), kdl.Frame(kdl.Vector(0., 0., 0.))))
        # wrist roll
        ch.addSegment(kdl.Segment(kdl.Joint(kdl.Joint.RotX), kdl.Frame(kdl.Vector(0., 0., 0.))))
        return ch

    ## Instantiate the KDL solvers for a chain.
    # @return (fk, ik_vel, ik_pos, jacobian) solvers
    def create_solvers(self, ch):
        fk = kdl.ChainFkSolverPos_recursive(ch)
        ik_v = kdl.ChainIkSolverVel_pinv(ch)
        ik_p = kdl.ChainIkSolverPos_NR(ch, fk, ik_v)
        jac = kdl.ChainJntToJacSolver(ch)
        return fk, ik_v, ik_p, jac

    ## define tooltip as a 3x1 np matrix in the wrist coord frame.
    def set_tooltip(self, arm, p):
        if arm == 0:
            self.right_tooltip = p
        else:
            rospy.logerr('Arm %d is not supported.' % (arm))

    ## Raw KDL forward kinematics.
    # @param q kdl.JntArray of joint angles
    # @param link_number compute FK up to this segment
    # @return kdl.Frame, or None if the solver fails
    def FK_kdl(self, arm, q, link_number):
        if arm == 0:
            fk = self.right_fk
            endeffec_frame = kdl.Frame()
            kinematics_status = fk.JntToCart(q, endeffec_frame,
                                             link_number)
            if kinematics_status >= 0:
                return endeffec_frame
            else:
                rospy.loginfo('Could not compute forward kinematics.')
                return None
        else:
            msg = '%s arm not supported.' % (arm)
            rospy.logerr(msg)
            raise RuntimeError(msg)

    ## Forward kinematics; returns point in torso lift link frame.
    # @param q list of joint angles (PR2 convention)
    # @param link_number compute FK up to this segment (7 = wrist)
    # @return (3x1 position, 3x3 rotation), or (None, None) if unsupported
    def FK_all(self, arm, q, link_number=7):
        q = self.pr2_to_kdl(q)
        frame = self.FK_kdl(arm, q, link_number)
        pos = frame.p
        pos = ku.kdl_vec_to_np(pos)
        pos = pos + self.right_arm_base_offset_from_torso_lift_link
        m = frame.M
        rot = ku.kdl_rot_to_np(m)
        if arm == 0:
            tooltip_baseframe = rot * self.right_tooltip
            pos += tooltip_baseframe
        else:
            rospy.logerr('Arm %d is not supported.' % (arm))
            # consistent with the success path (and PR2Arms.FK): return a pair
            return None, None
        return pos, rot

    ## Convert a kdl.JntArray to a plain 7-element list (identity mapping).
    def kdl_to_pr2(self, q):
        if q is None:  # 'is None': '== None' breaks on array-like inputs
            return None
        return [q[0], q[1], q[2], q[3], q[4], q[5], q[6]]

    ## Convert a sequence of joint angles to a kdl.JntArray.
    def pr2_to_kdl(self, q):
        if q is None:  # 'is None': '== None' breaks on array-like inputs
            return None
        n = len(q)
        q_kdl = kdl.JntArray(n)
        for i in range(n):
            q_kdl[i] = q[i]
        return q_kdl

    ## Wrist Jacobian from KDL.
    # @param q kdl.JntArray
    # @return 6x7 np matrix, or None for an unsupported arm
    def Jac_kdl(self, arm, q):
        J_kdl = kdl.Jacobian(7)
        if arm != 0:
            rospy.logerr('Unsupported arm: ' + str(arm))
            return None
        self.right_jac.JntToJac(q, J_kdl)
        kdl_jac = np.matrix([
            [J_kdl[0, 0], J_kdl[0, 1], J_kdl[0, 2], J_kdl[0, 3], J_kdl[0, 4], J_kdl[0, 5], J_kdl[0, 6]],
            [J_kdl[1, 0], J_kdl[1, 1], J_kdl[1, 2], J_kdl[1, 3], J_kdl[1, 4], J_kdl[1, 5], J_kdl[1, 6]],
            [J_kdl[2, 0], J_kdl[2, 1], J_kdl[2, 2], J_kdl[2, 3], J_kdl[2, 4], J_kdl[2, 5], J_kdl[2, 6]],
            [J_kdl[3, 0], J_kdl[3, 1], J_kdl[3, 2], J_kdl[3, 3], J_kdl[3, 4], J_kdl[3, 5], J_kdl[3, 6]],
            [J_kdl[4, 0], J_kdl[4, 1], J_kdl[4, 2], J_kdl[4, 3], J_kdl[4, 4], J_kdl[4, 5], J_kdl[4, 6]],
            [J_kdl[5, 0], J_kdl[5, 1], J_kdl[5, 2], J_kdl[5, 3], J_kdl[5, 4], J_kdl[5, 5], J_kdl[5, 6]],
        ])
        return kdl_jac

    ## compute Jacobian (at wrist).
    # @param arm - 0 or 1
    # @param q - list of 7 joint angles.
    # @return 6x7 np matrix
    def Jac(self, arm, q):
        rospy.logerr('Jac only works for getting the Jacobian at the wrist. Use Jacobian to get the Jacobian at a general location.')
        jntarr = self.pr2_to_kdl(q)
        kdl_jac = self.Jac_kdl(arm, jntarr)
        pr2_jac = kdl_jac
        return pr2_jac

    ## compute Jacobian at point pos.
    # p is in the torso_lift_link coord frame.
    # this is wrapper function
    def jacobian(self, arm, q, pos):
        return self.Jacobian(arm, q, pos)

    ## compute Jacobian at point pos (hand computed, column by column).
    # pos is in the torso_lift_link coord frame.
    # @return 6x7 np matrix, or None for an unsupported arm
    def Jacobian(self, arm, q, pos):
        if arm != 0:
            rospy.logerr('Arm %d is not supported.' % (arm))
            return None
        # temporarily zero the tooltip so FK_all gives the joint origins.
        tooltip = self.right_tooltip
        self.right_tooltip = np.matrix([0., 0., 0.]).T
        v_list = []
        w_list = []
        for i in range(7):
            p, rot = self.FK_all(arm, q, i)
            r = pos - p
            # map the KDL joint type (RotX/RotY/RotZ) to the rotation-axis
            # column of rot. NOTE(review): relies on the RotX/RotY/RotZ enum
            # being consecutive -- confirm against the PyKDL version in use.
            z_idx = self.right_chain.getSegment(i).getJoint().getType() - 1
            z = rot[:, z_idx]
            v_list.append(np.matrix(np.cross(z.A1, r.A1)).T)
            w_list.append(z)
        J = np.row_stack((np.column_stack(v_list), np.column_stack(w_list)))
        self.right_tooltip = tooltip
        return J

    ## Not implemented; always returns None.
    def close_to_singularity(self, arm, q):
        pass

    ## Check q against the (right arm) joint limits.
    # @param delta_list per-joint slack added outside the limits
    # @return numpy bool, True if every joint is within limits
    # @raise RuntimeError for the left arm (unimplemented)
    def within_joint_limits(self, arm, q, delta_list=[0., 0., 0., 0., 0., 0., 0.]):
        if arm == 0:  # right arm
            min_arr = np.radians(np.array([-109., -24, -220, -132, -np.inf, -120, -np.inf]))
            #max_arr = np.radians(np.array([26., 68, 41, 0, np.inf, 0, np.inf]))
            max_arr = np.radians(np.array([26., 68, 41, 5, np.inf, 5, np.inf]))  # 5 to prevent singularity. Need to come up with a better solution.
        else:
            raise RuntimeError('within_joint_limits unimplemented for left arm')
        q_arr = np.array(q)
        d_arr = np.array(delta_list)
        return np.all((q_arr <= max_arr + d_arr, q_arr >= min_arr - d_arr))
if __name__ == '__main__':
    # Interactive hardware test script: each 'if False:' section is a
    # standalone check that is enabled by hand when needed.
    from visualization_msgs.msg import Marker
    import hrl_lib.viz as hv
    rospy.init_node('pr2_arms_test')
    pr2_arms = PR2Arms()
    pr2_kdl = PR2Arms_kdl()
    r_arm, l_arm = 0, 1
    arm = r_arm
    if False:
        # continuously print the current joint angles in degrees.
        np.set_printoptions(precision=2, suppress=True)
        while not rospy.is_shutdown():
            q = pr2_arms.get_joint_angles(arm)
            print 'q in degrees:', np.degrees(q)
            rospy.sleep(0.1)
    if False:
        # send the arm to the all-zeros home configuration.
        jep = [0.] * 7
        rospy.loginfo('Going to home location.')
        raw_input('Hit ENTER to go')
        pr2_arms.set_jep(arm, jep, duration=2.)
    if False:
        # testing FK by publishing a frame marker.
        marker_pub = rospy.Publisher('/pr2_kdl/ee_marker', Marker)
        pr2_kdl.set_tooltip(arm, np.matrix([0.15, 0., 0.]).T)
        rt = rospy.Rate(100)
        rospy.loginfo('Starting the maker publishing loop.')
        while not rospy.is_shutdown():
            q = pr2_arms.get_joint_angles(arm)
            p, rot = pr2_kdl.FK_all(arm, q)
            m = hv.create_frame_marker(p, rot, 0.15, '/torso_lift_link')
            m.header.stamp = rospy.Time.now()
            marker_pub.publish(m)
            rt.sleep()
    if False:
        # testing Jacobian by printing KDL and my own Jacobian at the
        # current configuration.
        while not rospy.is_shutdown():
            q = pr2_arms.get_joint_angles(arm)
            J_kdl = pr2_kdl.Jac(arm , q)
            p, rot = pr2_kdl.FK_all(arm, q)
            J_adv = pr2_kdl.Jacobian(arm, q, p)
            print J_adv.shape
            diff_J = J_kdl - J_adv
            print 'difference between KDL and adv is:'
            print diff_J
            print 'Norm of difference matrix:', np.linalg.norm(diff_J)
            raw_input('Move arm into some configuration and hit enter to get the Jacobian.')
    if True:
        # default test: round-trip FK -> IK with both the ROS service FK
        # and the local KDL FK, and compare the IK solutions.
        while not rospy.is_shutdown():
            q = pr2_arms.wrap_angles(pr2_arms.get_joint_angles(arm))
            print "actual", q
            p_ros, rot_ros = pr2_arms.FK(arm, q)
            p_kdl, rot_kdl = pr2_kdl.FK_all(arm, q)
            ik_ros = pr2_arms.IK(r_arm, p_ros, rot_ros, q)
            ik_kdl = pr2_arms.IK(r_arm, p_kdl, rot_kdl, q)
            diff = np.array(ik_ros) - np.array(ik_kdl)
            print "IK ros", ik_ros
            print "IK kdl", ik_kdl
            # squared joint-space errors of each IK solution vs the true q.
            if len(ik_ros) == 7:
                err_ros = np.array(q) - np.array(ik_ros)
                err_kdl = np.array(q) - np.array(ik_kdl)
                print "err ros", sum(err_ros**2), "err kdl", sum(err_kdl**2), "diff", sum(diff**2)
| [
[
1,
0,
0.0446,
0.0014,
0,
0.66,
0,
954,
0,
2,
0,
0,
954,
0,
0
],
[
1,
0,
0.0459,
0.0014,
0,
0.66,
0.0323,
83,
0,
2,
0,
0,
83,
0,
0
],
[
1,
0,
0.0473,
0.0014,
0,
0.... | [
"import numpy as np, math",
"from threading import RLock, Timer",
"import sys, copy",
"import roslib; roslib.load_manifest('epc_core')",
"import roslib; roslib.load_manifest('epc_core')",
"roslib.load_manifest('force_torque') # hack by Advait",
"import force_torque.FTClient as ftc",
"import tf",
"im... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.